python-uncompyle6, mirror of https://github.com/rocky/python-uncompyle6.git
Compare commits: release-py ... release-py (121 commits)
Commits (SHA1):

9e37495493 77b93c5f21 0b198ee881 9e0c65881d c796d6a799 3892fb533a 2ea7487ca7 d4f6cec3d0
b1705e283d eee751e22a 2b0fefb95f 1d7a3c6444 e7778f83f2 b51039ac1e 1a627ba207 f73f0ba41c
114f979555 ea75bcf47e 6c6dcab857 0654aed6c8 7b38d2f1f8 dfbd60231b 8b67f2ccd0 3447ca0767
aadea7224d 1e858efafd da7421da1c ce88a72ea1 96ca68a6fe 147b6e1cfe 7725b8e7de d7b12f4da1
62ddbe320d c7b9e54e59 a694601264 3003070acb 19d6dedcf5 51ad3fb36e f017acce21 5bef5683e4
4e1467adc8 7cdf0abb43 9b336251a7 7844456e1e e06f88043f 356ea6c770 8fc3fd146f 4d58438515
f7bfe3f7b2 ce5066bddb c54a47b15f d1e02afb4b 93f18e2449 f4ceb6304d 783e62f3ca 503039ab51
8393064136 c38dc61021 45782bbb39 4c9cd5657e dc627d13b8 ddc3489991 5b24c20331 8bb01143d8
bb9b3ac9cf 05ac60ea74 a9635da96a e790cb75fd 348afeebbf d138a01bf1 9e8e4f54c7 a06a5e1cd8
1048f6a964 7fed237077 8b816ead0d 300d387349 27ab6fe2f5 2e164763eb d332bde104 0893652943
6efd7afda3 ee3202779a 6888553773 9c072a6a42 277ad36566 af3d46b35c e1bc0c5cd6 5a519ed36a
0f489672b9 b7d8cbfaf5 af10f99776 0cbafa6e3a 4afaee2a36 daea3c348c bf45260588 34a356d237
d9c1374a59 2e05137f2b 267ecda070 7e89839777 c7f8edd5ef 6a991833a3 28ee3f1257 e9588e56e2
7b2217fda4 5ca219f3d3 b733a1b036 4615cda03f eb92418224 844221cd43 df8d253f78 89b42e3696
22e5a4a283 61810172d1 7c299fbf37 da695115b5 f1d9e194fe e727a437ea 9a3e11a957 966a4bc7dc
658c8b4be7
.gitignore (vendored, 3 changes)
@@ -17,4 +17,5 @@
__pycache__
build
/.venv*
/.idea
+/.hypothesis

.travis.yml
@@ -6,7 +6,7 @@ python:
- '2.7' # this is a cheat here because travis doesn't do 2.4-2.6

install:
- pip install -r requirements.txt
- pip install -e .
- pip install -r requirements-dev.txt

script:

ChangeLog (665 lines changed)
@@ -1,8 +1,665 @@
2017-10-10 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse24.py, uncompyle6/scanners/scanner3.py:
Misc bugs

2017-10-05 rocky <rb@dustyfeet.com>

* test/simple_source/branching/02_ifelse_lambda.py: One more test

2017-10-05 rocky <rb@dustyfeet.com>

* .gitignore, pytest/test_grammar.py, uncompyle6/parser.py,
uncompyle6/parsers/parse26.py, uncompyle6/parsers/parse27.py,
uncompyle6/parsers/parse3.py, uncompyle6/semantics/consts.py,
uncompyle6/semantics/pysource.py: Sync with master

2017-10-03 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse2.py, uncompyle6/parsers/parse24.py,
uncompyle6/parsers/parse26.py: handle newer parser reduction
behavior

2017-10-03 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/fragments.py,
uncompyle6/semantics/pysource.py: Go over table-semantics
description yet again

2017-10-02 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse2.py, uncompyle6/parsers/parse3.py: Sync
with master

2017-09-26 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse3.py: Annotation field can be unicode...
When deparsing Python 3.x from Python 2.

2017-09-25 rocky <rb@dustyfeet.com>

* __pkginfo__.py: Require xdis 3.6.0 or greater

2017-09-25 rocky <rb@dustyfeet.com>

* : commit 0654aed6c823d0bb20abdc866481ca5950db72f7 Author: rocky
<rb@dustyfeet.com> Date: Thu Sep 21 11:29:17 2017 -0400

2017-09-21 rocky <rb@dustyfeet.com>

* pytest/test_pysource.py, uncompyle6/semantics/consts.py,
uncompyle6/semantics/fragments.py, uncompyle6/semantics/pysource.py:
Unit test for format-specifiers

2017-09-20 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/fragments.py,
uncompyle6/semantics/pysource.py: Tidy pysource and fragments

2017-09-20 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/consts.py: Tidy/regularize table entry
formatting

2017-09-20 rocky <rb@dustyfeet.com>

* test/test_pythonlib.py, uncompyle6/semantics/pysource.py: small
fixes... test_pythonlib.py: it is sys.exit not exit; pysource.py: restore node
type on async_call function

2017-09-20 rocky <rb@dustyfeet.com>

* pytest/test_pysource.py, uncompyle6/semantics/pysource.py: Start
pysource unit test

2017-09-17 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/fragments.py,
uncompyle6/semantics/pysource.py: emgine -> template_engine

2017-08-31 rocky <rb@dustyfeet.com>

* : commit 356ea6c7705a557cb3e725d1aca8589dd62b5cdf Author: rocky
<rb@dustyfeet.com> Date: Thu Aug 31 09:50:48 2017 -0400

2017-08-31 rocky <rb@dustyfeet.com>

* : commit 4d5843851543bfb3c97fc3c49036f1a971fc1d66 Author: rocky
<rb@dustyfeet.com> Date: Thu Aug 31 08:53:58 2017 -0400

2017-08-15 rocky <rb@dustyfeet.com>

* Makefile: 3.7 support

2017-08-15 rocky <rb@dustyfeet.com>

* : commit c54a47b15f85be50d2278aa79fd514eb08580e65 Author: rocky
<rb@dustyfeet.com> Date: Tue Aug 15 10:47:12 2017 -0400

2017-08-15 rocky <rb@dustyfeet.com>

* __pkginfo__.py, pytest/validate.py, uncompyle6/parser.py,
uncompyle6/scanner.py: Misc cleanups... remove code now in xdis;
require at least xdis 3.5.4; PyPy tolerance in validate testing

2017-08-13 rocky <rb@dustyfeet.com>

* ChangeLog, README.rst, __pkginfo__.py, pytest/test_basic.py,
uncompyle6/parser.py, uncompyle6/scanner.py: Allow version to be
string... in get_python_parser and get_scanner

2017-08-13 rocky <rb@dustyfeet.com>

* pytest/test_basic.py, uncompyle6/parser.py, uncompyle6/scanner.py:
Allow 3-part version string lookups, e.g 2.7.1.
We allow a float here, but if passed a string like '2.7' or
'2.7.13', accept that in looking up either a scanner or a parser.

2017-08-10 rocky <rb@dustyfeet.com>

* : commit 503039ab51f004cca27a9da43ff22b031cc486dc Author: rocky
<rb@dustyfeet.com> Date: Thu Aug 10 09:41:48 2017 -0400

2017-08-09 rocky <rb@dustyfeet.com>

* ChangeLog, NEWS, README.rst, __pkginfo__.py,
uncompyle6/semantics/consts.py, uncompyle6/version.py: Get ready for
release 2.11.3; need xdis 3.5.1 for now. Adjust for xdis "is-not"
which we need as "is not"

2017-08-09 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/consts.py: xdis "is not" is now "is-not"

2017-08-09 rocky <rb@dustyfeet.com>

* ChangeLog: Get ready for release 2.11.3

2017-08-09 rocky <rb@dustyfeet.com>

* : commit dc627d13b8455ded4bf708a596bb466f9df9bf7b Author: rocky
<rb@dustyfeet.com> Date: Wed Aug 9 21:19:30 2017 -0400

2017-08-03 rocky <rb@dustyfeet.com>

* pytest/test_deparse.py, pytest/test_docstring.py,
pytest/test_fjt.py, pytest/test_single_compile.py,
pytest/validate.py, uncompyle6/scanners/scanner3.py,
uncompyle6/scanners/scanner30.py: Python 2.4 comptiability and ...
exception match -> exception-match

2017-08-02 rocky <rb@dustyfeet.com>

* __pkginfo__.py: Bump xdis

2017-08-02 rocky <rb@dustyfeet.com>

* __pkginfo__.py: Remove six from python 2.4/2.5

2017-08-02 rocky <rb@dustyfeet.com>

* __pkginfo__.py: Revert commit to wrong branch

2017-08-02 rocky <rb@dustyfeet.com>

* __pkginfo__.py: Remove six from Python-2.4/2.5 package

2017-08-02 rocky <rb@dustyfeet.com>

* uncompyle6/scanners/scanner3.py: in xdis "exception match" is now
"exception-match"

2017-08-02 rocky <rb@dustyfeet.com>

* __pkginfo__.py: Python 2.4 doesn't do six

2017-08-01 rocky <rb@dustyfeet.com>

* pytest/validate.py, test/dis-compare.py,
test/simple-uncompyle-code-test.py: Python 2.4 compatibility

2017-07-17 rocky <rb@dustyfeet.com>

* __pkginfo__.py, uncompyle6/scanners/scanner2.py,
uncompyle6/scanners/scanner3.py, uncompyle6/scanners/scanner30.py:
xdis's "exception match" is now "exception-match"

2017-07-15 rocky <rb@dustyfeet.com>

* __pkginfo__.py: xdis 3.5.1 is botched?

2017-07-14 rocky <rb@dustyfeet.com>

* __pkginfo__.py: Use newer xdis

2017-07-14 R. Bernstein <rocky@users.noreply.github.com>

* README.rst: Fixes issue #124

2017-07-14 rocky <rb@dustyfeet.com>

* HISTORY.md: History updates

2017-07-09 rocky <rb@dustyfeet.com>

* README.rst: RsT doc formatting

2017-07-09 rocky <rb@dustyfeet.com>

* ChangeLog, HOW-TO-REPORT-A-BUG.md, NEWS, uncompyle6/version.py:
Get ready for release 2.11.2

2017-07-08 rocky <rb@dustyfeet.com>

* __pkginfo__.py, uncompyle6/scanner.py,
uncompyle6/scanners/scanner2.py, uncompyle6/scanners/scanner26.py,
uncompyle6/scanners/scanner3.py, uncompyle6/scanners/scanner30.py,
uncompyle6/scanners/tok.py: Use xdis 3.5.0's opcode sets

2017-07-08 rocky <rb@dustyfeet.com>

* test/test_pyenvlib.py, uncompyle6/scanners/pypy32.py,
uncompyle6/scanners/pypy35.py, uncompyle6/scanners/scanner15.py,
uncompyle6/scanners/scanner32.py, uncompyle6/scanners/scanner34.py,
uncompyle6/scanners/scanner35.py, uncompyle6/scanners/scanner36.py:
Start supporting Pypy 3.5 (5.7.1-beta)

2017-07-05 rocky <rb@dustyfeet.com>

* test/simple_source/bug26/03_loop_if_cf.py,
uncompyle6/parsers/parse26.py: Loops in Python 2.4-2.6 loop
come_from. Looks like Python 2.4-2.6 may have a COME_FROM(_LOOP)
before the jump_back. Fixes Issue #123

2017-06-29 rocky <rb@dustyfeet.com>

* : Work around not having real flow-control analysis

2017-06-28 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/make_function.py: A guard against badly
formated bytecode

2017-06-25 rocky <rb@dustyfeet.com>

* : commit 9c072a6a423d8379712296dbcd499c772ba7ef59 Author: rocky
<rb@dustyfeet.com> Date: Sun Jun 25 18:44:50 2017 -0400

2017-06-25 rocky <rb@dustyfeet.com>

* uncompyle6/version.py: Get ready for release 2.11.1

2017-06-24 rocky <rb@dustyfeet.com>

* __pkginfo__.py, uncompyle6/scanner.py,
uncompyle6/scanners/scanner2.py, uncompyle6/scanners/scanner3.py,
uncompyle6/scanners/scanner30.py, uncompyle6/semantics/pysource.py:
Use xdis' instruction offset calculation fns.. next_offset, op_size,
has_argument

2017-06-19 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/pysource.py: Python 2 sometimes need
str->uncode in writing?

2017-06-19 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/pysource.py: Allow deparsed out to be str as
well as unicode

2017-06-18 rocky <rb@dustyfeet.com>

* uncompyle6/scanners/scanner2.py, uncompyle6/scanners/scanner3.py,
uncompyle6/semantics/fragments.py,
uncompyle6/semantics/make_function.py: More merge fixups from master

2017-06-18 rocky <rb@dustyfeet.com>

* : commit af10f99776b142c44fb4507033fb3220b5f57910 Author: rocky
<rb@dustyfeet.com> Date: Sun Jun 18 15:22:27 2017 -0400

2017-06-13 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/fragments.py: Adjust nodeInfo if it is a
Token

2017-06-13 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/fragments.py: Add nonterminal node in
extractInfo

2017-06-10 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/fragments.py,
uncompyle6/semantics/make_function.py: Fragment tag more expressions.
Revise make_function3 comment wrt args and kwargs

2017-06-10 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/fragments.py: Fragment tag array subscripts

2017-06-10 R. Bernstein <rocky@users.noreply.github.com>

* README.rst: Create README.rst

2017-06-10 R. Bernstein <rocky@users.noreply.github.com>

* README.rst: Create README.rst

2017-06-10 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/fragments.py: Set YIELD_VALUE offset in a
<yield> expr

2017-06-10 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/make_function.py: Python 3.2 MAKE_FUNCTION
again.. Was handling bug32/01_named_and_kwargs.py wrong again

2017-06-09 R. Bernstein <rocky@users.noreply.github.com>

* : Merge pull request #119 from rocky/scan-longconstant Simplify
access to L65536 ...

2017-06-09 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/make_function.py: Attempt to document the
MAKE_FUNCTION/MAKE_LAMBDA mess... in Python 3.0+

2017-06-08 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/make_function.py: Correct make_function3 for
Pytohn 3.2

2017-06-08 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/pysource.py: Disable "continue" removal in
pysource.py. "continue" could be the only statement and then
removing it might lead to a dangling "else".

2017-06-07 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/fragments.py: Mark "pass" offsets. Start
routine to find previous node.

2017-06-06 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse3.py, uncompyle6/semantics/fragments.py:
Remove hacky fragments try fixup... hacky call_function code is
also not needed or will be reinstated properly. Better grammar
structure for Python 3.6 call_function.

2017-06-05 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse3.py, uncompyle6/parsers/parse36.py,
uncompyle6/scanners/scanner36.py: BUILD_{MAP,TUPLE}_UNPACK &
CALL_FUNCTION_EX_KW... Bang on these in 3.6. Not totally succesfull
right now. In fact a regression on one of the test cases

2017-06-05 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/fragments.py: Important fragments bug fix...
start, finish that had been adjusted wasn't getting reflected in
final returned deparsed.offsets dictionary. Redo keeping API
compatibility, i.e we still use namedtuple NodeInfo.

2017-06-04 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse3.py, uncompyle6/semantics/pysource.py:
Python 3.5 *args with kwargs handling. 3.5 is a snowflake here.
Thank you, Python. Fully fixes Issue 95. 3.6 is broken on this
source, but for a *different* reason. Sigh.

2017-06-03 rocky <rb@dustyfeet.com>

* README.rst, __pkginfo__.py,
test/simple_source/bug35/04_CALL_FUNCTION_VAR_KW.py,
uncompyle6/semantics/fragments.py: Small changes. fragment tag
EXEC_STMT

2017-06-03 rocky <rb@dustyfeet.com>

* pytest/validate.py: 2.4 doesn't do six

2017-06-03 rocky <rb@dustyfeet.com>

* appveyor.yml: Nope it (appveyor) doesn't.

2017-06-03 rocky <rb@dustyfeet.com>

* __pkginfo__.py, appveyor.yml: Administrivia. See if appveyor will
handle 2.5

2017-06-03 rocky <rb@dustyfeet.com>

* : commit 7c299fbf3777c452d6a10075964961783f510699 Author: rocky
<rb@dustyfeet.com> Date: Sat Jun 3 05:38:05 2017 -0400

2017-06-03 rocky <rb@dustyfeet.com>

* __pkginfo__.py: We need six

2017-06-03 rocky <rb@dustyfeet.com>

* README.rst, circle.yml, requirements-dev.txt: Go over
administrivia

2017-06-03 rocky <rb@dustyfeet.com>

* ChangeLog, NEWS, uncompyle6/version.py: Get ready for release
2.10.1

2017-06-03 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/fragments.py,
uncompyle6/semantics/pysource.py: Fragment bugs.
fragments.py: * deparse_code_aorund_offset: was sometimes returning
the wrong type * capture function name offset * lint imports
pysource.py: use a clearer variable name

2017-06-02 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/fragments.py: Track changes in ifelstmtr..
in fragments from pysource

2017-05-30 rocky <rb@dustyfeet.com>

* pytest/test_function_call.py: No decorators in Python < 2.6

2017-05-30 rocky <rb@dustyfeet.com>

* : commit ad98fae3d4b0b83f65b15da8201e33c0ee6dab17 Author: rocky
<rb@dustyfeet.com> Date: Tue May 30 01:26:52 2017 -0400

2017-05-30 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/fragments.py: Python 3.6 makefunction
handling for fragments

2017-05-23 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/pysource.py: Fix up 3.6 unmapexpr

2017-05-23 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/pysource.py: Fix up retreiving "async"
property on 3.6

2017-05-23 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/pysource.py: Fix bug in a 3.6 class name.

2017-05-23 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/fragments.py,
uncompyle6/semantics/pysource.py: Add fuzzy offset deparse lookup

2017-05-21 rocky <rb@dustyfeet.com>

* uncompyle6/scanners/scanner3.py: Correct EXTENDED_ARG handling on
Python 3.6... where it can appear several times and xdis may handle
it as well. It possibly in other versions bug since EXTENDED_ARG is
used so rarely there because it has such a high value 1<<16, it's
hard to test and determine that.

2017-05-20 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/pysource.py: Worse results. Revert some of
the last changes

2017-05-20 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse3.py, uncompyle6/semantics/pysource.py:
More explicit about 3.5 UNMAP_PACK. Have to reduce 3.5 bytecode
testing for now, code is more solid.

2017-05-19 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse3.py, uncompyle6/parsers/parse36.py,
uncompyle6/scanners/scanner3.py: Simplify EXTENDED_ARG on 3.x.
We largely remove them and fold them itno the next op.
MAKE_FUNCTION though before 3.6 is an exception as that indicates an
annotated function

2017-05-19 rocky <rb@dustyfeet.com>

* uncompyle6/scanners/scanner26.py: EXTENDED_ARG is implemented in
2.6

2017-05-19 rocky <rb@dustyfeet.com>

* test/simple_source/expression/06_huge_list.py,
uncompyle6/parsers/parse3.py, uncompyle6/semantics/pysource.py: Fix
EXTENDED_ARG for long lists, sets, maps

2017-05-19 rocky <rb@dustyfeet.com>

* uncompyle6/scanners/scanner3.py: Another attempt at getting
get_target() correct

2017-05-19 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse3.py: Bug in pypy JUMP_IF_NOT_DEBUG
handling

2017-05-19 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse36.py, uncompyle6/scanners/scanner3.py:
EXTENDED_ARG handling... get_target() wasn't taking into account
EXTENDED_ARG before opcode. This is mostly relevant in Python 3.6
where the max size before needing EXTENDED_ARG has been reduced to
256, but theoretically possible in earlier versions.

2017-05-18 rocky <rb@dustyfeet.com>

* __pkginfo__.py: Enforce using xdis >=3.3.1 .. to pick up bug
fixes to 3.6 in xdis

2017-05-17 rocky <rb@dustyfeet.com>

* __pkginfo__.py, uncompyle6/parsers/parse36.py,
uncompyle6/scanners/scanner3.py: Small changes....
* __pkginfo__.py: Need spark parser 1.6.1 for corrected
remove_rules() fn * parser36.py: remove replaced Python3 rules
* scanner3.py: corrected comment. Thanks to moagstar here. *

2017-05-16 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse36.py: Fix broken CI on 3.6... Another
grammar rule replacing SETUP_LOOP with setup_loop

2017-05-16 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse36.py: More EXTENDED_ARGS on 3.6

2017-05-16 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse36.py: extend use of EXTENDED_ARGS in 3.6.
switching to a wordcode seems to have made opcode fields smaller so
we need EXTENDED_ARG more?

2017-05-16 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse36.py, uncompyle6/semantics/pysource.py:
Allow LOAD_CONST EXTENDED_ARG

2017-05-15 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse3.py: Reinstate 3.6 listcomp rule

2017-05-15 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse3.py: Bang on 3.6 MAKE_FUNCTION some more

2017-05-14 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/pysource.py: towards fixing a
3.5.CALL_FUNCTONI_VAR bug

2017-05-14 rocky <rb@dustyfeet.com>

* test/simple_source/bug35/04_CALL_FUNCTION_VAR_KW.py,
uncompyle6/parsers/parse3.py: Python 3.5 kw arg can be an expr.
Fixes Issue #95

2017-05-14 R. Bernstein <rocky@users.noreply.github.com>

* : Merge pull request #117 from rocky/3.6-MAKE_FUNCTION 3.6 make
function

2017-05-13 rocky <rb@dustyfeet.com>

* uncompyle6/scanners/scanner3.py: MAKE_FUNCTION_FLAGS can be a
simpler tuple

2017-05-13 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse3.py: Grammar rules for Python 3.6
MAKE_FUNCTION

2017-05-13 rocky <rb@dustyfeet.com>

* README.rst, uncompyle6/parsers/parse3.py,
uncompyle6/parsers/parse36.py, uncompyle6/semantics/pysource.py:
Bang on 3.6 MAKE_FUNCTION a bit more.
parse3.py, parse36.py: adding return_closure rule tags what's going
on with this rule. pysource.py: start changing semantic rules to
support code changed by new make_function semantics. README.rst: typo

2017-05-13 rocky <rb@dustyfeet.com>

* uncompyle6/scanners/scanner3.py: Typo

2017-05-12 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse27.py: Bug in 2.7 decompiling ourself!
Troublesome file was uncompyle6.semantics.pysource.engine()

2017-05-11 R. Bernstein <rocky@users.noreply.github.com>

* : Merge pull request #113 from grkov90/patch-1 Fixed out_base bug

2017-05-11 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse3.py, uncompyle6/scanners/scanner3.py,
uncompyle6/semantics/make_function.py: WIP: start 3.6 MAKE_FUNCTION
handling

2017-05-11 Daniel Bradburn <moagstar@gmail.com>

* : Merge pull request #116 from moagstar/function_call_keyword_only
Added support for Python 3.6 CALL_FUNCTION_KW

2017-05-10 Daniel Bradburn <moagstar@gmail.com>

* uncompyle6/semantics/pysource.py: Fixed bug in compiling double
star arg only function calls where the closing parenthesis would be
missed

2017-05-10 Daniel Bradburn <moagstar@gmail.com>

* requirements-dev.txt: Adding requirement for pytest >= 3.0 to fix
strange INTERNALERROR in combination with hypothesis when using
pytest 2.6.4

2017-05-10 Daniel Bradburn <moagstar@gmail.com>

* pytest/test_CALL_FUNCTION_KW.sh, pytest/test_function_call.py,
uncompyle6/parsers/parse3.py, uncompyle6/parsers/parse36.py,
uncompyle6/scanners/scanner36.py, uncompyle6/semantics/pysource.py:
Added support for support for Python 3.6 CALL_FUNCTION_KW

2017-05-08 rocky <rb@dustyfeet.com>

* appveyor.yml, test/test_pyenvlib.py,
uncompyle6/semantics/pysource.py: pysource guard and another
appveyor test

2017-05-08 rocky <rb@dustyfeet.com>

* appveyor.yml: appveyor take 2

2017-05-08 rocky <rb@dustyfeet.com>

* appveyor.yml, appveyor/install.ps1, appveyor/run_with_env.cmd: Try
appveyor

2017-05-07 rocky <rb@dustyfeet.com>

* uncompyle6/semantics/pysource.py: More guarded CONTINUE deletion

2017-05-07 rocky <rb@dustyfeet.com>

* uncompyle6/scanner.py, uncompyle6/scanners/scanner3.py,
uncompyle6/semantics/pysource.py: Reduce spurious "continue"
statements

2017-05-07 rocky <rb@dustyfeet.com>

* test/Makefile: --weak-verify on 3.3 with inclusion of last commit.
Note that the result is sematically equivalent, so it is correct.

2017-05-07 rocky <rb@dustyfeet.com>

* test/simple_source/looping/12_if_while_true_pass.py,
uncompyle6/scanners/scanner3.py: Python 3.x control-flow bug...
"pass" statement inside "while True"

2017-05-07 rocky <rb@dustyfeet.com>

* HOW-TO-REPORT-A-BUG.md: Small typo

2017-05-07 rocky <rb@dustyfeet.com>

* uncompyle6/scanners/scanner3.py: Fix improper COME_FROM_EXCEPT in
Python 3.3+

2017-05-06 rocky <rb@dustyfeet.com>

* uncompyle6/parsers/parse33.py: python 3.3 while True parsing bug

2017-05-06 rocky <rb@dustyfeet.com>

* ChangeLog, NEWS, uncompyle6/version.py: Get ready for release
2.9.11

2017-05-06 rocky <rb@dustyfeet.com>

* ChangeLog, NEWS, uncompyle6/version.py: Get ready for release
2.9.11

2017-05-06 rocky <rb@dustyfeet.com>

* test/Makefile, uncompyle6/scanners/scanner2.py,
@@ -23,6 +680,14 @@

* .travis.yml: Try CI testing on Python 3.6

2017-05-03 Gregory <grkov90@gmail.com>

* uncompyle6/main.py: Some fix

2017-05-03 Gregory <grkov90@gmail.com>

* uncompyle6/main.py: Fixed out_base bug. Variable filename using
in for tags; uncompyle6 -o argument haven't worked

2017-05-02 rocky <rb@dustyfeet.com>

* test/simple_source/bug35/01_map_unpack.py, uncompyle6/parser.py,

HISTORY.md (15 lines changed)
@@ -44,8 +44,8 @@ it appears that Hartmut did most of the work to get this code to
accept the full Python language. He added precedence to the table
specifiers, support for multiple versions of Python, the
pretty-printing of docstrings, lists, and hashes. He also wrote test and verification routines of
-deparsed bytecode, and used this in an extensive set of tests that he also wrote. He says he could verify against the
-entire Python library. However I have subsequently found small and relatively obscure bugs in the decompilation code.
+deparsed bytecode, and used this in an extensive set of tests that he also wrote. He says he could verify against the
+entire Python library. However I have subsequently found small and relatively obscure bugs in the decompilation code.

decompyle2.2 was packaged for Debian (sarge) by
[Ben Burton around 2002](https://packages.qa.debian.org/d/decompyle.html). As
@@ -66,7 +66,7 @@ code to handle first Python 2.3 and then 2.4 bytecodes. Because of
jump optimization introduced in the CPython bytecode compiler at that
time, various JUMP instructions were classifed as going backwards, and
COME FROM instructions were reintroduced. See
-[RELEASE-2.4-CHANGELOG.txt](https://github.com/rocky/python-uncompyle6/blob/master/DECOMPYLE-2.4-CHANGELOG.txt)
+[RELEASE-2.4-CHANGELOG.txt](https://github.com/rocky/python-uncompyle6/blob/master/DECOMPYLE-2.4-CHANGELOG.txt)
for more details here. There wasn't a public
release of RELEASE-2.4 and bytecodes other than Python 2.4 weren't
supported. Dan says the Python 2.3 version could verify the entire
@@ -99,7 +99,7 @@ made a few commits later on. But mostly wibiti, and Guenther
Starnberger got the code to where uncompyle2 was around 2012.

In `uncompyle`, decompilation of python bytecode 2.5 & 2.6 is done by
-transforming the byte code into a a pseudo 2.7 python bytecode and is
+transforming the byte code into a pseudo-2.7 Python bytecode and is
based on code from Eloi Vanderbeken.

This project, `uncompyle6`, abandons that approach for various
@@ -120,10 +120,10 @@ while, handling Python bytecodes from Python versions 2.5+ and
3.2+. In doing so, it has been expedient to separate this into three
projects:

-* bytecode loading and disassembly ([xdis](https://pypi.python.org/pypi/xdis)),
+* marshaling/unmarshaling, bytecode loading and disassembly ([xdis](https://pypi.python.org/pypi/xdis)),
* parsing and tree building ([spark_parser](https://pypi.python.org/pypi/spark_parser)),
* this project - grammar and semantic actions for decompiling
-  ([uncompyle6](https://pypi.python.org/pypi/spark_parser)).
+  ([uncompyle6](https://pypi.python.org/pypi/uncompyle6)).


Over the many years, code styles and Python features have
@@ -162,5 +162,8 @@ support has been lagging.
Tests for the project have been, or are being, culled from all of the
projects mentioned.

+For a little bit of the history of changes to the Earley-algorithm parser,
+see the file [NEW-FEATURES.rst](https://github.com/rocky/python-spark/blob/master/NEW-FEATURES.rst) in the [python-spark github repository](https://github.com/rocky/python-spark).
+
NB. If you find mistakes, want corrections, or want your name added
(or removed), please contact me.

HOW-TO-REPORT-A-BUG.md
@@ -19,7 +19,7 @@ So it is likely you'll find a mistranslation in decompiling.
The basic requirement is pretty simple:

* Python bytecode
-* Source text
+* Python source text

## What to send (additional helpful information)

@@ -50,7 +50,7 @@ one fool can learn, so can another."

## Narrowing the problem

-I don't need the entire source code base for which one file or module
+I don't need or want the entire source code base for which one file or module
can't be decompiled. I just need that one file or module only. If
there are several files, file a bug report for each file.

Makefile (2 lines changed)
@@ -36,6 +36,8 @@ check-2.7 check-3.3 check-3.4: pytest
check-3.0 check-3.1 check-3.2 check-3.5 check-3.6:
	$(MAKE) -C test $@

+check-3.7: pytest
+
#:Tests for Python 2.6 (doesn't have pytest)
check-2.4 check-2.5 check-2.6:
	$(MAKE) -C test $@

NEWS (101 lines changed)
@@ -1,7 +1,76 @@
-uncompyle6 2.10.0 2016-05-30 Elaine Gordon
+uncompyle6 2.13.0 2017-10-10
+
+- Add fuzzy offset deparse lookup
+- 3.6 bugfixes
+- Fixes in deparsing lambda expressions
+- Improve table-semantics descriptions
+- Document hacky customize arg count better (until we can remove it)
+- Update to use xdis 3.7.0 or greater
+
+uncompyle6 2.12.0 2017-09-26
+
+- Use xdis 3.6.0 or greater now
+- Small semantic table cleanups
+- Python 3.4's terms a little names better
+- Slightly more Python 3.7, but still failing a lot
+
+uncompyle6 2.11.5 2017-08-31
+
+- Skeletal support for Python 3.7
+
+uncompyle6 2.11.4 2017-08-15
+
+* scanner and parser now allow 3-part version string lookups,
+  e.g. 2.7.1 We allow a float here, but if passed a string like '2.7'. or
+* unpin 3.5.1. xdis 3.5.4 has been releasd and fixes the problems we had. Use that.
+* some routnes here moved to xdis. Use the xdis version
+* README.rst: Link typo Name is trepan2 now not trepan
+* xdis-forced change adjust for COMPARE_OP "is-not" in
+  semanatic routines. We need "is not".
+* Some PyPy tolerance in validate testing.
+* Some pyston tolerance
+
+uncompyle6 2.11.3 2017-08-09
+
+Very minor changes
+
+- RsT doc fixes and updates
+- use newer xdis, but not too new; 3.5.2 breaks uncompyle6
+- use xdis opcode sets
+- xdis "exception match" is now "exception-match"
+
+uncompyle6 2.11.2 2017-07-09
+
+- Start supporting Pypy 3.5 (5.7.1-beta)
+- use xdis 3.5.0's opcode sets and require xdis 3.5.0
+- Correct some Python 2.4-2.6 loop detection
+- guard against badly formatted bytecode
+
+uncompyle6 2.11.1 2017-06-25
+
+- Python 3.x annotation and function signature fixes
+- Bump xdis version
+- Small pysource bug fixes
+
+uncompyle6 2.11.0 2017-06-18 Fleetwood
+- Major improvements in fragment tracking
+  * Add nonterminal node in extractInfo
+  * tag more offsets in expressions
+  * tag array subscripts
+  * set YIELD value offset in a <yield> expr
+  * fix a long-standing bug in not adjusting final AST when melding other deparse ASTs
+- Fixes yet again for make_function node handling; document what's up here
+- Fix bug in snowflake Python 3.5 *args kwargs
+
+uncompyle6 2.10.1 2017-06-3 Marylin Frankel
+
+- fix some fragments parsing bugs
+- was returning the wrong type sometimes in deparse_code_around_offset()
+- capture function name in offsets
+- track changes to ifelstrmtr node from pysource into fragments
+
+uncompyle6 2.10.0 2017-05-30 Elaine Gordon

- Add fuzzy offset deparse look up
- 3.6 bug fixes
- fix EXTENDED_ARGS handling (and in 2.6 and others)
- semantic routine make_function fragments.py
- MAKE_FUNCTION handling
@@ -12,19 +81,19 @@ uncompyle6 2.10.0 2016-05-30 Elaine Gordon
- 3.5 FUNCTION_VAR bug
- 3.x pass statement insdie while True
- Improve 3.2 decompilation
-- Fixed -o argument processing (Gregrory)
+- Fixed -o argument processing (grkov90)
- Reduce scope of LOAD_ASSERT as expr to 3.4+
- "await" statement fixes
- 2.3, 2.4 "if 1 .." fixes
- 3.x annotation fixes

-uncompyle6 2.9.11 2016-04-06
+uncompyle6 2.9.11 2017-04-06

- Better support for Python 3.5+ BUILD_MAP_UNPACK
- Start 3.6 CALL_FUNCTION_EX support
- Many decompilation bug fixes. (Many more remain). See ChangeLog

-uncompyle6 2.9.10 2016-02-25
+uncompyle6 2.9.10 2017-02-25

- Python grammar rule fixes
- Add ability to get grammar coverage on runs
@@ -91,7 +160,7 @@ uncompyle6 2.9.6 2016-11-20
uncompyle6 2.9.5 2016-11-13

- Fix Python 3 bugs:
-  * improprer while 1 else
+  * improper while 1 else
  * docstring indent
  * 3.3 default values in lambda expressions
  * start 3.0 decompilation (needs newer xdis)
@@ -101,12 +170,12 @@ uncompyle6 2.9.5 2016-11-13
uncompyle6 2.9.4 2016-11-02

- Handle Python 3.x function annotations
-- track def keywoard-parameter line-splitting in source code better
+- track def keyword-parameter line-splitting in source code better
- bump min xdis version to mask previous xdis bug

uncompyle6 2.9.3 2016-10-26

-Release forced by incompatiblity change in xdis 3.2.0.
+Release forced by incompatibility change in xdis 3.2.0.

- Python 3.1 bugs:
  * handle "with ... as"
@@ -138,7 +207,7 @@ uncompyle6 2.9.0 2016-10-09
  this Forces change in requirements.txt and _pkg_info_.py
- Start Python 1.5 decompiling; another round of work is needed to
  remove bugs
-- Simpify python 2.1 grammar
+- Simplify python 2.1 grammar
- Fix bug with -t ... Wasn't showing source text when -t option was given
- Fix 2.1-2.6 bug in list comprehension
@@ -161,7 +230,7 @@ control-flow structure detection is done.
. 3.0 .. 3.2 *args processing
. 3.0 .. 3.2 call name and kwargs bug
. 3.0 .. getting parameter of *
-. 3.0 .. handling varible number of args
+. 3.0 .. handling variable number of args
. 3.0 .. "if" structure bugs
* 3.5+ if/else bugs
* 2.2-2.6 bugs
@@ -212,7 +281,7 @@ uncompyle6 2.7.1 2016-07-26

uncompyle6 2.7.0 2016-07-15

-- Many Syntax and verifification bugs removed
+- Many Syntax and verification bugs removed
  tested on standard libraries from 2.3.7 to 3.5.1
  and they all decompile and verify fine.
  I'm sure there are more bugs though.
@@ -239,9 +308,9 @@ uncompyle6 2.6.0 2016-07-07
- Better <2.6 vs. 2.7 grammar separation
- Fix some 2.7 deparsing bugs
- Fix bug in installing uncompyle6 script
-- Doc improvments
+- Doc improvements

-uncompyle6 2.5.0 2016-06-22 Summer Solstace
+uncompyle6 2.5.0 2016-06-22 Summer Solstice

- Much better Python 3.2-3.5 coverage.
  3.4.6 is probably the best; 3.2 and 3.5 are weaker
@@ -253,7 +322,7 @@ uncompyle6 2.5.0 2016-06-22 Summer Solstace
uncompyle6 2.4.0 2016-05-18 (in memory of Lewis Bernstein)

- Many Python 3 bugs fixed:
-  * Python 3.2 to 3.5 libaries largely
+  * Python 3.2 to 3.5 libraries largely
    uncompyle and most verify
- pydisassembler:
  * disassembles all code objects in a file
@@ -311,7 +380,7 @@ uncompyle6 2.2.0 2016-04-30

uncompyle6 2.2.0 2016-04-02

-- Support single-mode (in addtion to exec-mode) compilation
+- Support single-mode (in addition to exec-mode) compilation
- Start to DRY Python 2 and Python 3 grammars
- Fix bug in if else ternary construct
- Fix bug in uncomplye6 -d and -r options (via lelicopter)

README.rst (17 lines changed)
@@ -1,10 +1,10 @@
-|buildstatus| |Supported Python Versions|
+|buildstatus|

uncompyle6
==========

A native Python cross-version Decompiler and Fragment Decompiler.
-Follows in the tradition of decompyle, uncompyle, and uncompyle2.
+The successor to decompyle, uncompyle, and uncompyle2.


Introduction
@@ -12,7 +12,7 @@ Introduction

*uncompyle6* translates Python bytecode back into equivalent Python
source code. It accepts bytecodes from Python version 1.5, and 2.1 to
-3.6 or so, including PyPy bytecode and Dropbox's Python 2.5 bytecode.
+3.7 or so, including PyPy bytecode and Dropbox's Python 2.5 bytecode.

Why this?
---------
@@ -56,7 +56,7 @@ This uses setup.py, so it follows the standard Python routine:

::

    pip install -r requirements.txt
    pip install -e .
    pip install -r requirements-dev.txt
    python setup.py install # may need sudo
    # or if you have pyenv:
@@ -171,9 +171,12 @@ See Also
* https://code.google.com/archive/p/unpyc3/ : supports Python 3.2 only. The above projects use a different decompiling technique than what is used here.
+* https://github.com/figment/unpyc3/ : fork of above, but supports Python 3.3 only. Include some fixes like supporting function annotations
* The HISTORY_ file.
+* `How to report a bug <https://github.com/rocky/python-uncompyle6/blob/master/HOW-TO-REPORT-A-BUG.md>`_
* https://github.com/rocky/python-xdis : Cross Python version disassembler
+* https://github.com/rocky/python-xasm : Cross Python version assembler

.. |downloads| image:: https://img.shields.io/pypi/dd/uncompyle6.svg
-.. _trepan: https://pypi.python.org/pypi/trepan
+.. _trepan: https://pypi.python.org/pypi/trepan2
.. _HISTORY: https://github.com/rocky/python-uncompyle6/blob/master/HISTORY.md
.. _debuggers: https://pypi.python.org/pypi/trepan3k
.. _remake: https://bashdb.sf.net/remake
@@ -181,7 +184,5 @@ See Also
.. _this: https://github.com/rocky/python-uncompyle6/wiki/Deparsing-technology-and-its-use-in-exact-location-reporting
.. |buildstatus| image:: https://travis-ci.org/rocky/python-uncompyle6.svg
   :target: https://travis-ci.org/rocky/python-uncompyle6
-.. |Supported Python Versions| image:: https://img.shields.io/pypi/pyversions/uncompyle6.svg
-   :target: https://pypi.python.org/pypi/uncompyle6/
.. _PJOrion: http://www.koreanrandom.com/forum/topic/15280-pjorion-%D1%80%D0%B5%D0%B4%D0%B0%D0%BA%D1%82%D0%B8%D1%80%D0%BE%D0%B2%D0%B0%D0%BD%D0%B8%D0%B5-%D0%BA%D0%BE%D0%BC%D0%BF%D0%B8%D0%BB%D1%8F%D1%86%D0%B8%D1%8F-%D0%B4%D0%B5%D0%BA%D0%BE%D0%BC%D0%BF%D0%B8%D0%BB%D1%8F%D1%86%D0%B8%D1%8F-%D0%BE%D0%B1%D1%84
.. _Deobfuscator: https://github.com/extremecoders-re/PjOrion-Deobfuscator
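
The README above says uncompyle6 translates bytecode back into equivalent source. A minimal sketch of doing that from Python, using the deparse_code entry point that the pytest files later in this compare exercise; the add function here is only an illustrative target, and the default-argument behavior is assumed from the tests rather than from documented API:

    # Minimal sketch (not part of this diff): round-trip a function's code
    # object through uncompyle6. Assumes deparse_code(version, co, ...) as
    # exercised by the pytest files in this compare; `add` is illustrative.
    import sys
    from uncompyle6 import PYTHON_VERSION, deparse_code

    def add(a, b):
        return a + b

    # Python 2 exposes the code object as func_code, Python 3 as __code__.
    code = getattr(add, '__code__', None) or add.func_code
    deparsed = deparse_code(PYTHON_VERSION, code)
    sys.stdout.write(deparsed.text)  # the reconstructed source text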
|
@@ -33,14 +33,14 @@ classifiers = ['Development Status :: 5 - Production/Stable',
|
||||
# The rest in alphabetic order
|
||||
author = "Rocky Bernstein, Hartmut Goebel, John Aycock, and others"
|
||||
author_email = "rb@dustyfeet.com"
|
||||
entry_points={
|
||||
entry_points = {
|
||||
'console_scripts': [
|
||||
'uncompyle6=uncompyle6.bin.uncompile:main_bin',
|
||||
'pydisassemble=uncompyle6.bin.pydisassemble:main',
|
||||
]}
|
||||
ftp_url = None
|
||||
install_requires = ['spark-parser >= 1.6.1, < 1.7.0',
|
||||
'xdis >= 3.3.1, < 3.4.0']
|
||||
install_requires = ['spark-parser >= 1.7.0, < 1.8.0',
|
||||
'xdis >= 3.6.0, < 3.7.0']
|
||||
license = 'MIT'
|
||||
mailing_list = 'python-debugger@googlegroups.com'
|
||||
modname = 'uncompyle6'
|
||||
|

circle.yml
@@ -6,7 +6,7 @@ machine:

dependencies:
  override:
    - pip install -r requirements.txt
    - pip install -e .
    - pip install -r requirements-dev.txt
test:
  override:

pytest/test_basic.py (new file, 11 lines)
@@ -0,0 +1,11 @@
from uncompyle6.scanner import get_scanner
from uncompyle6.parser import get_python_parser

def test_get_scanner():
    # See that we can retrieve a scanner using a full version number
    assert get_scanner('2.7.13')


def test_get_parser():
    # See that we can retrieve a parser using a full version number
    assert get_python_parser('2.7.13')
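
test_basic.py pins down the lookup behavior added in the 2017-08-13 commits ("Allow version to be string", "Allow 3-part version string lookups"). A short sketch of the accepted forms, following the ChangeLog wording rather than a documented API contract:

    # Illustrative only: per the 2017-08-13 ChangeLog entries, scanners and
    # parsers can be looked up by a float or by a 2- or 3-part version string.
    from uncompyle6.scanner import get_scanner
    from uncompyle6.parser import get_python_parser

    assert get_scanner(2.7)             # float, the older calling convention
    assert get_scanner('2.7.13')        # full version string, as in the test
    assert get_python_parser('2.7.13')  # same lookup rules for the parser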

@@ -29,7 +29,7 @@ def list_comp():
    [y for y in range(3)]

def get_parsed_for_fn(fn):
-    code = fn.__code__ if PYTHON3 else fn.func_code
+    code = fn.func_code
    return deparse(PYTHON_VERSION, code)

def check_expect(expect, parsed):

@@ -10,7 +10,7 @@ else:
    maxint = sys.maxint
from uncompyle6.semantics.helper import print_docstring

-class PrintFake():
+class PrintFake:
    def __init__(self):
        self.pending_newlines = 0
        self.f = StringIO()

@@ -21,9 +21,8 @@ def bug_loop(disassemble, tb=None):
    disassemble(tb)

def test_if_in_for():
-    code = bug.__code__
+    code = bug.func_code
    scan = get_scanner(PYTHON_VERSION)
-    print(PYTHON_VERSION)
    if 2.7 <= PYTHON_VERSION <= 3.0 and not IS_PYPY:
        n = scan.setup_code(code)
        scan.build_lines_data(code, n)

pytest/test_function_call.py (deleted, 175 lines)
@@ -1,175 +0,0 @@
# std
import string
# 3rd party
from hypothesis import given, assume, example, settings, strategies as st
import pytest
# uncompyle
from validate import validate_uncompyle
from test_fstring import expressions


alpha = st.sampled_from(string.ascii_lowercase)
numbers = st.sampled_from(string.digits)
alphanum = st.sampled_from(string.ascii_lowercase + string.digits)


@st.composite
def function_calls(draw,
                   min_keyword_args=0, max_keyword_args=5,
                   min_positional_args=0, max_positional_args=5,
                   min_star_args=0, max_star_args=1,
                   min_double_star_args=0, max_double_star_args=1):
    """
    Strategy factory for generating function calls.

    :param draw: Callable which draws examples from other strategies.

    :return: The function call text.
    """
    st_positional_args = st.lists(
        alpha,
        min_size=min_positional_args,
        max_size=max_positional_args
    )
    st_keyword_args = st.lists(
        alpha,
        min_size=min_keyword_args,
        max_size=max_keyword_args
    )
    st_star_args = st.lists(
        alpha,
        min_size=min_star_args,
        max_size=max_star_args
    )
    st_double_star_args = st.lists(
        alpha,
        min_size=min_double_star_args,
        max_size=max_double_star_args
    )

    positional_args = draw(st_positional_args)
    keyword_args = draw(st_keyword_args)
    st_values = st.lists(
        expressions(),
        min_size=len(keyword_args),
        max_size=len(keyword_args)
    )
    keyword_args = [
        x + '=' + e
        for x, e in
        zip(keyword_args, draw(st_values))
    ]
    star_args = ['*' + x for x in draw(st_star_args)]
    double_star_args = ['**' + x for x in draw(st_double_star_args)]

    arguments = positional_args + keyword_args + star_args + double_star_args
    draw(st.randoms()).shuffle(arguments)
    arguments = ','.join(arguments)

    function_call = 'fn({arguments})'.format(arguments=arguments)
    try:
        # TODO: Figure out the exact rules for ordering of positional, keyword,
        # star args, double star args and in which versions the various
        # types of arguments are supported so we don't need to check that the
        # expression compiles like this.
        compile(function_call, '<string>', 'single')
    except:
        assume(False)
    return function_call


def test_function_no_args():
    validate_uncompyle("fn()")


def isolated_function_calls(which):
    """
    Returns a strategy for generating function calls, but isolated to
    particular types of arguments, for example only positional arguments.

    This can help reason about debugging errors in specific types of function
    calls.

    :param which: One of 'keyword', 'positional', 'star', 'double_star'

    :return: Strategy for generating an function call isolated to specific
             argument types.
    """
    kwargs = dict(
        max_keyword_args=0,
        max_positional_args=0,
        max_star_args=0,
        max_double_star_args=0,
    )
    kwargs['_'.join(('min', which, 'args'))] = 1
    kwargs['_'.join(('max', which, 'args'))] = 5 if 'star' not in which else 1
    return function_calls(**kwargs)


with settings(max_examples=25):

    @given(isolated_function_calls('positional'))
    @example("fn(0)")
    def test_function_positional_only(expr):
        validate_uncompyle(expr)

    @given(isolated_function_calls('keyword'))
    @example("fn(a=0)")
    def test_function_call_keyword_only(expr):
        validate_uncompyle(expr)

    @given(isolated_function_calls('star'))
    @example("fn(*items)")
    def test_function_call_star_only(expr):
        validate_uncompyle(expr)

    @given(isolated_function_calls('double_star'))
    @example("fn(**{})")
    def test_function_call_double_star_only(expr):
        validate_uncompyle(expr)


@pytest.mark.xfail()
def test_BUILD_CONST_KEY_MAP_BUILD_MAP_UNPACK_WITH_CALL_BUILD_TUPLE_CALL_FUNCTION_EX():
    validate_uncompyle("fn(w=0,m=0,**v)")


@pytest.mark.xfail()
def test_BUILD_MAP_BUILD_MAP_UNPACK_WITH_CALL_BUILD_TUPLE_CALL_FUNCTION_EX():
    validate_uncompyle("fn(a=0,**g)")


@pytest.mark.xfail()
def test_CALL_FUNCTION_EX():
    validate_uncompyle("fn(*g,**j)")


@pytest.mark.xfail()
def test_BUILD_MAP_CALL_FUNCTION_EX():
    validate_uncompyle("fn(*z,u=0)")


@pytest.mark.xfail()
def test_BUILD_TUPLE_CALL_FUNCTION_EX():
    validate_uncompyle("fn(**a)")


@pytest.mark.xfail()
def test_BUILD_MAP_BUILD_TUPLE_BUILD_TUPLE_UNPACK_WITH_CALL_CALL_FUNCTION_EX():
    validate_uncompyle("fn(b,b,b=0,*a)")


@pytest.mark.xfail()
def test_BUILD_TUPLE_BUILD_TUPLE_UNPACK_WITH_CALL_CALL_FUNCTION_EX():
    validate_uncompyle("fn(*c,v)")


@pytest.mark.xfail()
def test_BUILD_CONST_KEY_MAP_CALL_FUNCTION_EX():
    validate_uncompyle("fn(i=0,y=0,*p)")


@pytest.mark.skip(reason='skipping property based test until all individual tests are passing')
@given(function_calls())
def test_function_call(function_call):
    validate_uncompyle(function_call)

pytest/test_grammar.py
@@ -11,15 +11,16 @@ def test_grammar():
    remain_tokens = set([re.sub('_CONT$','', t) for t in remain_tokens])
    remain_tokens = set(remain_tokens) - opcode_set
    assert remain_tokens == set([]), \
-        "Remaining tokens %s\n====\n%s" % (remain_tokens, p.dumpGrammar())
+        "Remaining tokens %s\n====\n%s" % (remain_tokens, p.dump_grammar())

    p = get_python_parser(PYTHON_VERSION, is_pypy=IS_PYPY)
-    lhs, rhs, tokens, right_recursive = p.checkSets()
+    lhs, rhs, tokens, right_recursive = p.check_sets()
    expect_lhs = set(['expr1024', 'pos_arg'])
    unused_rhs = set(['build_list', 'call_function', 'mkfunc',
                      'mklambda',
                      'unpack', 'unpack_list'])
-    expect_right_recursive = [['designList', ('designator', 'DUP_TOP', 'designList')]]
+    expect_right_recursive = frozenset([('designList',
+                                         ('designator', 'DUP_TOP', 'designList'))])
    if PYTHON3:
        expect_lhs.add('load_genexpr')

@@ -39,13 +40,14 @@ def test_grammar():
    s = get_scanner(PYTHON_VERSION, IS_PYPY)
    ignore_set = set(
        """
-        JUMP_BACK CONTINUE RETURN_END_IF
+        JUMP_BACK CONTINUE
        COME_FROM COME_FROM_EXCEPT
        COME_FROM_EXCEPT_CLAUSE
        COME_FROM_LOOP COME_FROM_WITH
        COME_FROM_FINALLY ELSE
        LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP
-        LAMBDA_MARKER RETURN_LAST
+        LAMBDA_MARKER
+        RETURN_END_IF RETURN_END_IF_LAMBDA RETURN_VALUE_LAMBDA RETURN_LAST
        """.split())
    if 2.6 <= PYTHON_VERSION <= 2.7:
        opcode_set = set(s.opc.opname).union(ignore_set)
164
pytest/test_pysource.py
Normal file
164
pytest/test_pysource.py
Normal file
@@ -0,0 +1,164 @@
|
||||
from uncompyle6 import PYTHON3
|
||||
from uncompyle6.semantics.consts import (
|
||||
escape, NONE,
|
||||
# RETURN_NONE, PASS, RETURN_LOCALS
|
||||
)
|
||||
|
||||
if PYTHON3:
|
||||
from io import StringIO
|
||||
else:
|
||||
from StringIO import StringIO
|
||||
|
||||
from uncompyle6.semantics.pysource import SourceWalker as SourceWalker
|
||||
|
||||
def test_template_engine():
|
||||
s = StringIO()
|
||||
sw = SourceWalker(2.7, s, None)
|
||||
sw.ast = NONE
|
||||
    sw.template_engine(('--%c--', 0), NONE)
    print(sw.f.getvalue())
    assert sw.f.getvalue() == '--None--'
    # FIXME: and so on...


from uncompyle6.semantics.consts import (
    TABLE_R, TABLE_DIRECT,
)

from uncompyle6.semantics.fragments import (
    TABLE_DIRECT_FRAGMENT,
)

skip_for_now = "DELETE_DEREF".split()


def test_tables():
    for t, name, fragment in (
            (TABLE_DIRECT, 'TABLE_DIRECT', False),
            (TABLE_R, 'TABLE_R', False),
            (TABLE_DIRECT_FRAGMENT, 'TABLE_DIRECT_FRAGMENT', True)):
        for k, entry in t.iteritems():
            if k in skip_for_now:
                continue
            fmt = entry[0]
            arg = 1
            i = 0
            m = escape.search(fmt)
            print("%s[%s]" % (name, k))
            while m:
                i = m.end()
                typ = m.group('type') or '{'
                if typ in frozenset(['%', '+', '-', '|', ',', '{']):
                    # No args
                    pass
                elif typ in frozenset(['c', 'p', 'P', 'C', 'D']):
                    # One arg - should be an int or a tuple of ints
                    if typ == 'c':
                        assert isinstance(entry[arg], int), (
                            "%s[%s][%d] type %s is '%s' should be an int but is %s. "
                            "Full entry: %s" %
                            (name, k, arg, typ, entry[arg], type(entry[arg]), entry)
                        )
                    elif typ in frozenset(['C', 'D']):
                        tup = entry[arg]
                        assert isinstance(tup, tuple), (
                            "%s[%s][%d] type %s is %s should be a tuple but is %s. "
                            "Full entry: %s" %
                            (name, k, arg, typ, entry[arg], type(entry[arg]), entry)
                        )
                        assert len(tup) == 3
                        for j, x in enumerate(tup[:-1]):
                            assert isinstance(x, int), (
                                "%s[%s][%d][%d] type %s is %s should be an int but is %s. "
                                "Full entry: %s" %
                                (name, k, arg, j, typ, x, type(x), entry)
                            )
                        assert isinstance(tup[-1], str) or tup[-1] is None, (
                            "%s[%s][%d] sep type %s is %s should be a string but is %s. "
                            "Full entry: %s" %
                            (name, k, arg, typ, tup[-1], type(tup[-1]), entry)
                        )

                    elif typ == 'P':
                        tup = entry[arg]
                        assert isinstance(tup, tuple), (
                            "%s[%s][%d] type %s is %s should be a tuple but is %s. "
                            "Full entry: %s" %
                            (name, k, arg, typ, entry[arg], type(entry[arg]), entry)
                        )
                        assert len(tup) == 4
                        for j, x in enumerate(tup[:-2]):
                            assert isinstance(x, int), (
                                "%s[%s][%d][%d] type %s is '%s' should be an int but is %s. "
                                "Full entry: %s" %
                                (name, k, arg, j, typ, x, type(x), entry)
                            )
                        assert isinstance(tup[-2], str), (
                            "%s[%s][%d] sep type %s is '%s' should be a string but is %s. "
                            "Full entry: %s" %
                            (name, k, arg, typ, tup[-2], type(tup[-2]), entry)
                        )
                        assert isinstance(tup[1], int), (
                            "%s[%s][%d] prec type %s is '%s' should be an int but is %s. "
                            "Full entry: %s" %
                            (name, k, arg, typ, tup[1], type(tup[1]), entry)
                        )

                    else:
                        # Should be a tuple which contains only ints
                        tup = entry[arg]
                        assert isinstance(tup, tuple), (
                            "%s[%s][%d] type %s is '%s' should be a tuple but is %s. "
                            "Full entry: %s" %
                            (name, k, arg, typ, entry[arg], type(entry[arg]), entry)
                        )
                        assert len(tup) == 2
                        for j, x in enumerate(tup):
                            assert isinstance(x, int), (
                                "%s[%s][%d][%d] type '%s' is '%s' should be an int but is %s. Full entry: %s" %
                                (name, k, arg, j, typ, x, type(x), entry)
                            )
                        pass
                    arg += 1
                elif typ in frozenset(['r']) and fragment:
                    pass
                elif typ == 'b' and fragment:
                    assert isinstance(entry[arg], int), (
                        "%s[%s][%d] type %s is '%s' should be an int but is %s. "
                        "Full entry: %s" %
                        (name, k, arg, typ, entry[arg], type(entry[arg]), entry)
                    )
                    arg += 1
                elif typ == 'x' and fragment:
                    tup = entry[arg]
                    assert isinstance(tup, tuple), (
                        "%s[%s][%d] type %s is '%s' should be a tuple but is %s. "
                        "Full entry: %s" %
                        (name, k, arg, typ, entry[arg], type(entry[arg]), entry)
                    )
                    assert len(tup) == 2
                    assert isinstance(tup[0], int), (
                        "%s[%s][%d] source type %s is '%s' should be an int but is %s. "
                        "Full entry: %s" %
                        (name, k, arg, typ, entry[arg], type(entry[arg]), entry)
                    )
                    assert isinstance(tup[1], tuple), (
                        "%s[%s][%d] dest type %s is '%s' should be a tuple but is %s. "
                        "Full entry: %s" %
                        (name, k, arg, typ, entry[arg], type(entry[arg]), entry)
                    )
                    for j, x in enumerate(tup[1]):
                        assert isinstance(x, int), (
                            "%s[%s][%d][%d] type %s is %s should be an int but is %s. Full entry: %s" %
                            (name, k, arg, j, typ, x, type(x), entry)
                        )
                    arg += 1
                    pass
                else:
                    assert False, (
                        "%s[%s][%d] type %s is not known. Full entry: %s" %
                        (name, k, arg, typ, entry)
                    )
                m = escape.search(fmt, i)
                pass
            assert arg == len(entry), (
                "%s[%s] arg %d should be length of entry %d. Full entry: %s" %
                (name, k, arg, len(entry), entry))
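To make the checks above concrete, here is a hypothetical table fragment of the shape they validate (the keys and format strings are invented for illustration; they are not entries from the real tables):

    # '%c' consumes one argument: an int index of the child node to format.
    # '%C' consumes one argument: a (start, end, separator) triple.
    sample_table = {
        'call_stmt':   ( '%c\n', 0 ),            # entry[1] must be an int
        'print_items': ( '%C',  (0, 2, ', ') ),  # entry[1] must be a 3-tuple
    }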
@@ -1,19 +1,19 @@
 import pytest
-from uncompyle6 import PYTHON_VERSION, PYTHON3, deparse_code
+from uncompyle6 import PYTHON_VERSION, deparse_code

-def test_single_mode():
-    single_expressions = (
-        'i = 1',
-        'i and (j or k)',
-        'i += 1',
-        'i = j % 4',
-        'i = {}',
-        'i = []',
-        'for i in range(10):\n i\n',
-        'for i in range(10):\n for j in range(10):\n i + j\n',
-        'try:\n i\nexcept Exception:\n j\nelse:\n k\n'
-    )
+if PYTHON_VERSION >= 2.5:
+    def test_single_mode():
+        single_expressions = (
+            'i = 1',
+            'i and (j or k)',
+            'i += 1',
+            'i = j % 4',
+            'i = {}',
+            'i = []',
+            'for i in range(10):\n i\n',
+            'for i in range(10):\n for j in range(10):\n i + j\n',
+            'try:\n i\nexcept Exception:\n j\nelse:\n k\n'
+        )

-    for expr in single_expressions:
-        code = compile(expr + '\n', '<string>', 'single')
-        assert deparse_code(PYTHON_VERSION, code, compile_mode='single').text == expr + '\n'
+        for expr in single_expressions:
+            code = compile(expr + '\n', '<string>', 'single')
+            assert deparse_code(PYTHON_VERSION, code, compile_mode='single').text == expr + '\n'
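The round trip the test performs, reduced to a single case (taken directly from the expressions above):

    from uncompyle6 import PYTHON_VERSION, deparse_code
    code = compile('i = 1\n', '<string>', 'single')
    assert deparse_code(PYTHON_VERSION, code, compile_mode='single').text == 'i = 1\n'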
@@ -1,24 +1,25 @@
 # future
 from __future__ import print_function
 # std
 import os
 import difflib
 import subprocess
 import tempfile
-import functools
 # compatibility
 import six
+from StringIO import StringIO
 # uncompyle6 / xdis
 from uncompyle6 import PYTHON_VERSION, IS_PYPY, deparse_code
 # TODO: I think we can get xdis to support the dis api (python 3 version) by doing something like this there
 from xdis.bytecode import Bytecode
 from xdis.main import get_opcode
 opc = get_opcode(PYTHON_VERSION, IS_PYPY)
-Bytecode = functools.partial(Bytecode, opc=opc)
+try:
+    import functools
+    Bytecode = functools.partial(Bytecode, opc=opc)
+    def _dis_to_text(co):
+        return Bytecode(co).dis()
+except:
+    pass

-def _dis_to_text(co):
-    return Bytecode(co).dis()

@@ -42,8 +43,11 @@ def print_diff(original, uncompyled):
         print('\nTo display diff highlighting run:\n    pip install BeautifulSoup4')
     diff = difflib.HtmlDiff().make_table(*args)

-    with tempfile.NamedTemporaryFile(delete=False) as f:
+    f = tempfile.NamedTemporaryFile(delete=False)
+    try:
         f.write(str(diff).encode('utf-8'))
+    finally:
+        f.close()

     try:
         print()
@@ -60,8 +64,7 @@ def print_diff(original, uncompyled):
         print('\nFor side by side diff install elinks')
         diff = difflib.Differ().compare(original_lines, uncompyled_lines)
         print('\n'.join(diff))
-    finally:
-        os.unlink(f.name)
+    os.unlink(f.name)


 def are_instructions_equal(i1, i2):
@@ -123,7 +126,10 @@ def validate_uncompyle(text, mode='exec'):
     original_text = text

     deparsed = deparse_code(PYTHON_VERSION, original_code,
-                            compile_mode=mode, out=six.StringIO())
+                            compile_mode=mode,
+                            out=StringIO(),
+                            is_pypy=IS_PYPY)
     uncompyled_text = deparsed.text
     uncompyled_code = compile(uncompyled_text, '<string>', 'exec')
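The NamedTemporaryFile change above swaps the context manager for an explicit close so the file can be reopened by name before it is unlinked; the pattern in isolation (file contents hypothetical):

    import os, tempfile
    f = tempfile.NamedTemporaryFile(delete=False)
    try:
        f.write(b'<table>...</table>')
    finally:
        f.close()
    # ... hand f.name to an external viewer such as elinks ...
    os.unlink(f.name)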
@@ -1,4 +1,3 @@
 pytest>=3.0.0
 flake8
 hypothesis
 six
@@ -39,7 +39,7 @@ check-3.3: check-bytecode

 #: Run working tests from Python 3.4
 check-3.4: check-bytecode check-3.4-ok check-2.7-ok
-	$(PYTHON) test_pythonlib.py --bytecode-3.4 --verify $(COMPILE)
+	$(PYTHON) test_pythonlib.py --bytecode-3.4 --weak-verify $(COMPILE)

 #: Run working tests from Python 3.5
 check-3.5: check-bytecode
BIN  test/bytecode_2.6/03_loop_if_cf.pyc  (new file; binary file not shown)
BIN  test/bytecode_3.6/04_CALL_FUNCTION_VAR_KW.pyc-notyet  (new file; binary file not shown)
@@ -1,9 +1,8 @@
 #!/usr/bin/env python
 # Mode: -*- python -*-
 #
-# Copyright (c) 2015 by Rocky Bernstein <rb@dustyfeet.com>
+# Copyright (c) 2015, 2017 by Rocky Bernstein <rb@dustyfeet.com>
 #
 from __future__ import print_function

 import dis, os.path
@@ -1,7 +1,5 @@
 #!/usr/bin/env python

 from __future__ import print_function

 from uncompyle6 import uncompyle
 import sys, inspect
test/simple_source/branching/02_ifelse_lambda.py (new file, 5 lines)
@@ -0,0 +1,5 @@
+# We have to do contortions here because
+# lambdas have to be more or less on one line
+
+f = lambda x: 1 if x < 2 else 3
+f(5)
test/simple_source/bug26/03_loop_if_cf.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+# Bug in < 2.6 is having a COME_FROM_LOOP (but we
+# don't tag that, so it is just COME_FROM) *before*
+# a jump back to the loop.
+def pickup(self, open_players, open_buf, wrap_buf):
+    for aplayer in self._game.active_players:
+
+        if aplayer in open_players:
+            aplayer.send(open_players)
+
+            if self == aplayer:
+                for awatcher in self._watchers:
+                    if awatcher._can_see_detail:
+                        awatcher.send(open_buf)
+                    else:
+                        awatcher.send(wrap_buf)
+            else:
+                self._game.send(aplayer.side)
+        else:
+            self._game.send(aplayer.side, wrap_buf)
@@ -9,7 +9,7 @@ def open(file, mode = "r", buffering = None,
          newline = None, closefd = True) -> "IOBase":
     return text

-def foo(x: 'an argument that defaults to 5' = 5):
+def foo1(x: 'an argument that defaults to 5' = 5):
     print(x)

 def div(a: dict(type=float, help='the dividend'),
@@ -1,4 +1,5 @@
 # sql/schema.py
+# Note that kwargs comes before "positional" args
 def tometadata(self, metadata, schema, Table, args, name=None):
     table = Table(
         name, metadata, schema=schema,
@@ -27,7 +27,7 @@ from fnmatch import fnmatch

 TEST_VERSIONS=('2.3.7', '2.4.6', '2.5.6', '2.6.9',
                'pypy-2.4.0', 'pypy-2.6.1',
-               'pypy-5.0.1', 'pypy-5.3.1',
+               'pypy-5.0.1', 'pypy-5.3.1', 'pypy3.5-5.7.1-beta',
                '2.7.10', '2.7.11', '2.7.12', '2.7.13',
                '3.0.1', '3.1.5', '3.2.6',
                '3.3.5', '3.3.6',
@@ -169,13 +169,13 @@ def do_tests(src_dir, obj_patterns, target_dir, opts):
             main(src_dir, target_dir, files, [],
                  do_verify=opts['do_verify'])
             if failed_files != 0:
-                exit(2)
+                sys.exit(2)
             elif failed_verify != 0:
-                exit(3)
+                sys.exit(3)

         except (KeyboardInterrupt, OSError):
             print()
-            exit(1)
+            sys.exit(1)
     if test_opts['rmtree']:
         parent_dir = os.path.dirname(target_dir)
         print("Everything good, removing %s" % parent_dir)
@@ -9,10 +9,10 @@ Common uncompyle parser routines.
 import sys

 from xdis.code import iscode
+from xdis.magics import py_str2float
 from spark_parser import GenericASTBuilder, DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
 from uncompyle6.show import maybe_show_asm


 class ParserError(Exception):
     def __init__(self, token, offset):
         self.token = token
@@ -42,21 +42,25 @@ class PythonParser(GenericASTBuilder):
         else:
             return self.ast_first_offset(ast[0])

-    def add_unique_rule(self, rule, opname, count, customize):
+    def add_unique_rule(self, rule, opname, arg_count, customize):
         """Add rule to grammar, but only if it hasn't been added previously.
-           opname and count are used in the customize() semantic actions
-           to add the semantic action rule. Often, count is not used.
+           opname and arg_count are used in the customize() semantic
+           actions to add the semantic action rule. Arg_count is
+           used in custom opcodes like MAKE_FUNCTION to indicate how
+           many arguments it has. Often it is not used.
        """
        if rule not in self.new_rules:
            # print("XXX ", rule) # debug
            self.new_rules.add(rule)
            self.addRule(rule, nop_func)
-            customize[opname] = count
+            customize[opname] = arg_count
            pass
        return

    def add_unique_rules(self, rules, customize):
-        """Add rules (a list of string) to grammar
+        """Add rules (a list of string) to grammar. Note that
+           the rules must not be those that set arg_count in the
+           custom dictionary.
        """
        for rule in rules:
            if len(rule) == 0:
@@ -66,7 +70,9 @@ class PythonParser(GenericASTBuilder):
         return

     def add_unique_doc_rules(self, rules_str, customize):
-        """Add rules (a docstring-like list of rules) to grammar
+        """Add rules (a docstring-like list of rules) to grammar.
+           Note that the rules must not be those that set arg_count in the
+           custom dictionary.
        """
        rules = [r.strip() for r in rules_str.split("\n")]
        self.add_unique_rules(rules, customize)
@@ -83,17 +89,14 @@ class PythonParser(GenericASTBuilder):
         for i in dir(self):
             setattr(self, i, None)

-    def debug_reduce(self, rule, tokens, parent, i):
+    def debug_reduce(self, rule, tokens, parent, last_token_pos):
         """Customized format and print for our kind of tokens
            which gets called in debugging grammar reduce rules
         """
         def fix(c):
             s = str(c)
-            i = s.find('_')
-            if i == -1:
-                return s
-            else:
-                return s[:i]
+            last_token_pos = s.find('_')
+            return s if last_token_pos == -1 else s[:last_token_pos]

         prefix = ''
         if parent and tokens:
@@ -105,13 +108,13 @@ class PythonParser(GenericASTBuilder):
             if hasattr(p_token, 'offset'):
                 prefix += "%3s" % fix(p_token.offset)
                 if len(rule[1]) > 1:
-                    prefix += '-%-3s ' % fix(tokens[i-1].offset)
+                    prefix += '-%-3s ' % fix(tokens[last_token_pos-1].offset)
                 else:
                     prefix += '     '
         else:
             prefix = '        '

-        print("%s%s ::= %s" % (prefix, rule[0], ' '.join(rule[1])))
+        print("%s%s ::= %s (%d)" % (prefix, rule[0], ' '.join(rule[1]), last_token_pos))

 def error(self, instructions, index):
     # Find the last line boundary
@@ -132,7 +135,7 @@ class PythonParser(GenericASTBuilder):
         raise ParserError(err_token, err_token.offset)

     def typestring(self, token):
-        return token.type
+        return token.kind

     def nonterminal(self, nt, args):
         if nt in self.collect and len(args) > 1:
@@ -254,8 +257,11 @@ class PythonParser(GenericASTBuilder):

        stmt ::= return_stmt
        return_stmt ::= ret_expr RETURN_VALUE
+       return_stmt_lambda ::= ret_expr RETURN_VALUE_LAMBDA

        return_stmts ::= return_stmt
        return_stmts ::= _stmts return_stmt

        """
        pass

@@ -530,7 +536,9 @@ class PythonParser(GenericASTBuilder):
        stmt ::= return_lambda
        stmt ::= conditional_lambda

-       return_lambda ::= ret_expr RETURN_VALUE LAMBDA_MARKER
+       return_lambda ::= ret_expr RETURN_VALUE_LAMBDA LAMBDA_MARKER
+       return_lambda ::= ret_expr RETURN_VALUE_LAMBDA

        conditional_lambda ::= expr jmp_false return_if_stmt return_stmt LAMBDA_MARKER

        cmp ::= cmp_list
@@ -609,7 +617,15 @@ def get_python_parser(
     explanation of the different modes.
     """

+    # If version is a string, turn that into the corresponding float.
+    if isinstance(version, str):
+        version = py_str2float(version)
+
+    # FIXME: there has to be a better way...
+    # We could do this as a table lookup, but that would force us
+    # to import all of the parsers all of the time. Perhaps there is
+    # a lazy way of doing the import?
     if version < 3.0:
         if version == 1.5:
             import uncompyle6.parsers.parse15 as parse15
@@ -718,7 +734,7 @@ def get_python_parser(
     else:
         p = parse3.Python3ParserSingle(debug_parser)
     p.version = version
-    # p.dumpGrammar() # debug
+    # p.dump_grammar() # debug
     return p

 class PythonParserSingle(PythonParser):
@@ -762,6 +778,7 @@ def python_parser(version, co, out=sys.stdout, showasm=False,
 if __name__ == '__main__':
     def parse_test(co):
         from uncompyle6 import PYTHON_VERSION, IS_PYPY
-        ast = python_parser('2.7.13', co, showasm=True, is_pypy=True)
+        ast = python_parser(PYTHON_VERSION, co, showasm=True, is_pypy=IS_PYPY)
         print(ast)
         return
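With the py_str2float() conversion in place, get_python_parser() accepts either a float or a full version string; a quick sketch (keyword arguments assumed left at their defaults):

    from uncompyle6.parser import get_python_parser
    p = get_python_parser(2.7)        # float form
    p = get_python_parser("2.7.13")   # string form, normalized to 2.7 internally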
@@ -29,8 +29,8 @@ class Python15ParserSingle(Python21Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python15Parser()
-    p.checkGrammar()
-    p.dumpGrammar()
+    p.check_grammar()
+    p.dump_grammar()

 # local variables:
 # tab-width: 4
@@ -395,6 +395,8 @@ class Python2Parser(PythonParser):
         return

     def reduce_is_invalid(self, rule, ast, tokens, first, last):
+        if tokens is None:
+            return False
         lhs = rule[0]
         if lhs in ('augassign1', 'augassign2') and ast[0][0] == 'and':
             return True
@@ -415,4 +417,4 @@ class Python2ParserSingle(Python2Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python2Parser()
-    p.checkGrammar()
+    p.check_grammar()
@@ -33,8 +33,8 @@ class Python21ParserSingle(Python22Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python21Parser()
-    p.checkGrammar()
-    p.dumpGrammar()
+    p.check_grammar()
+    p.dump_grammar()

 # local variables:
 # tab-width: 4
@@ -26,8 +26,8 @@ class Python22ParserSingle(Python23Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python22Parser()
-    p.checkGrammar()
-    p.dumpGrammar()
+    p.check_grammar()
+    p.dump_grammar()

 # local variables:
 # tab-width: 4
@@ -67,8 +67,8 @@ class Python23ParserSingle(Python23Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python23Parser()
-    p.checkGrammar()
-    p.dumpGrammar()
+    p.check_grammar()
+    p.dump_grammar()

 # local variables:
 # tab-width: 4
@@ -55,13 +55,14 @@ class Python24Parser(Python25Parser):
         invalid = super(Python24Parser,
                         self).reduce_is_invalid(rule, ast,
                                                 tokens, first, last)
-        if invalid:
+        if invalid or tokens is None:
             return invalid

         # FIXME: this code never gets called...
         lhs = rule[0]
         if lhs == 'nop_stmt':
-            return not int(tokens[first].pattr) == tokens[last].offset
+            l = len(tokens)
+            if 0 <= l < len(tokens):
+                return not int(tokens[first].pattr) == tokens[last].offset

         return False

@@ -71,4 +72,4 @@ class Python24ParserSingle(Python24Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python24Parser()
-    p.checkGrammar()
+    p.check_grammar()
@@ -60,4 +60,4 @@ class Python25ParserSingle(Python26Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python25Parser()
-    p.checkGrammar()
+    p.check_grammar()
@@ -84,6 +84,12 @@ class Python26Parser(Python2Parser):
        ja_cf_pop ::= JUMP_ABSOLUTE come_froms POP_TOP
        jf_cf_pop ::= JUMP_FORWARD come_froms POP_TOP

+       # The first optional COME_FROM when it appears is really
+       # COME_FROM_LOOP, but in <= 2.6 we don't distinguish
+       # this
+
+       cf_jb_cf_pop ::= _come_from JUMP_BACK come_froms POP_TOP
+
        bp_come_from    ::= POP_BLOCK COME_FROM
        jb_bp_come_from ::= JUMP_BACK bp_come_from

@@ -111,7 +117,8 @@ class Python26Parser(Python2Parser):
        break_stmt ::= BREAK_LOOP JUMP_BACK

        # Semantic actions want else_suitel to be at index 3
        ifelsestmtl ::= testexpr c_stmts_opt jb_cf_pop else_suitel
+       ifelsestmtl ::= testexpr c_stmts_opt cf_jb_cf_pop else_suitel

        ifelsestmtc ::= testexpr c_stmts_opt ja_cf_pop else_suitec

        # Semantic actions want suite_stmts_opt to be at index 3
@@ -240,7 +247,9 @@ class Python26Parser(Python2Parser):
        and  ::= expr JUMP_IF_FALSE POP_TOP expr JUMP_IF_FALSE POP_TOP
        cmp_list ::= expr cmp_list1 ROT_TWO COME_FROM POP_TOP _come_from

-       conditional_lambda ::= expr jmp_false_then return_if_stmt return_stmt LAMBDA_MARKER
+       return_if_lambda ::= RETURN_END_IF_LAMBDA POP_TOP
+       conditional_lambda ::= expr jmp_false_then expr return_if_lambda
+                              return_stmt_lambda LAMBDA_MARKER
        """

    def add_custom_rules(self, tokens, customize):
@@ -251,7 +260,7 @@ class Python26Parser(Python2Parser):
         invalid = super(Python26Parser,
                         self).reduce_is_invalid(rule, ast,
                                                 tokens, first, last)
-        if invalid:
+        if invalid or tokens is None:
             return invalid
         if rule == ('and', ('expr', 'jmp_false', 'expr', '\\e_come_from_opt')):
             # Test that jmp_false jumps to the end of "and"
@@ -267,10 +276,10 @@ class Python26ParserSingle(Python2Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python26Parser()
-    p.checkGrammar()
+    p.check_grammar()
     from uncompyle6 import PYTHON_VERSION, IS_PYPY
     if PYTHON_VERSION == 2.6:
-        lhs, rhs, tokens, right_recursive = p.checkSets()
+        lhs, rhs, tokens, right_recursive = p.check_sets()
         from uncompyle6.scanner import get_scanner
         s = get_scanner(PYTHON_VERSION, IS_PYPY)
         opcode_set = set(s.opc.opname).union(set(
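The return_if_lambda / conditional_lambda rules above parse ternaries inside lambdas, i.e. the construct exercised by the test file added in this same change:

    f = lambda x: 1 if x < 2 else 3
    f(5)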
@@ -94,6 +94,10 @@ class Python27Parser(Python2Parser):
                       WITH_CLEANUP END_FINALLY

+       # Common with 2.6
+       return_if_lambda ::= RETURN_END_IF_LAMBDA COME_FROM
+       conditional_lambda ::= expr jmp_false expr return_if_lambda
+                              return_stmt_lambda LAMBDA_MARKER

        while1stmt ::= SETUP_LOOP return_stmts bp_come_from
        while1stmt ::= SETUP_LOOP return_stmts COME_FROM
        """
@@ -125,10 +129,10 @@ class Python27ParserSingle(Python27Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python27Parser()
-    p.checkGrammar()
+    p.check_grammar()
     from uncompyle6 import PYTHON_VERSION, IS_PYPY
     if PYTHON_VERSION == 2.7:
-        lhs, rhs, tokens, right_recursive = p.checkSets()
+        lhs, rhs, tokens, right_recursive = p.check_sets()
         from uncompyle6.scanner import get_scanner
         s = get_scanner(PYTHON_VERSION, IS_PYPY)
         opcode_set = set(s.opc.opname).union(set(
@@ -144,4 +148,5 @@ if __name__ == '__main__':
                               for t in remain_tokens])
         remain_tokens = set(remain_tokens) - opcode_set
         print(remain_tokens)
-        # p.dumpGrammar()
+        p.check_grammar()
+        p.dump_grammar()
@@ -18,6 +18,7 @@ that a later phase can turn into a sequence of ASCII text.
 from uncompyle6.parser import PythonParser, PythonParserSingle, nop_func
 from uncompyle6.parsers.astnode import AST
 from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
+from xdis import PYTHON3

 class Python3Parser(PythonParser):

@@ -415,6 +416,13 @@ class Python3Parser(PythonParser):
        # a JUMP_ABSOLUTE with no COME_FROM
        conditional ::= expr jmp_false expr jump_absolute_else expr

+       return_if_lambda ::= RETURN_END_IF_LAMBDA
+       conditional_lambda ::= expr jmp_false return_stmt_lambda
+                              return_stmt_lambda LAMBDA_MARKER
+       conditional_lambda ::= expr jmp_false expr return_if_lambda
+                              return_stmt_lambda LAMBDA_MARKER
+
        expr ::= LOAD_CLASSNAME

        # Python 3.4+
@@ -425,7 +433,7 @@ class Python3Parser(PythonParser):
     @staticmethod
     def call_fn_name(token):
         """Customize CALL_FUNCTION to add the number of positional arguments"""
-        return '%s_%i' % (token.type, token.attr)
+        return '%s_%i' % (token.kind, token.attr)

     def custom_build_class_rule(self, opname, i, token, tokens, customize):
         '''
@@ -441,16 +449,16 @@ class Python3Parser(PythonParser):
         # FIXME: I bet this can be simplified
         # look for next MAKE_FUNCTION
         for i in range(i+1, len(tokens)):
-            if tokens[i].type.startswith('MAKE_FUNCTION'):
+            if tokens[i].kind.startswith('MAKE_FUNCTION'):
                 break
-            elif tokens[i].type.startswith('MAKE_CLOSURE'):
+            elif tokens[i].kind.startswith('MAKE_CLOSURE'):
                 break
             pass
         assert i < len(tokens), "build_class needs to find MAKE_FUNCTION or MAKE_CLOSURE"
-        assert tokens[i+1].type == 'LOAD_CONST', \
+        assert tokens[i+1].kind == 'LOAD_CONST', \
             "build_class expecting CONST after MAKE_FUNCTION/MAKE_CLOSURE"
         for i in range(i, len(tokens)):
-            if tokens[i].type == 'CALL_FUNCTION':
+            if tokens[i].kind == 'CALL_FUNCTION':
                 call_fn_tok = tokens[i]
                 break
         assert call_fn_tok, "build_class custom rule needs to find CALL_FUNCTION"
@@ -491,11 +499,11 @@ class Python3Parser(PythonParser):
         # Yes, this computation based on instruction name is a little bit hokey.
         nak = ( len(opname)-len('CALL_FUNCTION') ) // 3

-        token.type = self.call_fn_name(token)
+        token.kind = self.call_fn_name(token)
         uniq_param = args_kw + args_pos
         if self.version == 3.5 and opname.startswith('CALL_FUNCTION_VAR'):
-            # Python 3.5 changes the stack position of where * args, the
-            # first LOAD_FAST, below are located.
+            # Python 3.5 changes the stack position of *args. KW args come
+            # after *args.
+            # Python 3.6+ replaces CALL_FUNCTION_VAR_KW with CALL_FUNCTION_EX
             if opname.endswith('KW'):
                 kw = 'expr '
@@ -503,27 +511,33 @@ class Python3Parser(PythonParser):
             else:
                 kw = ''
             rule = ('call_function ::= expr expr ' +
                     ('pos_arg ' * args_pos) +
-                    ('kwarg ' * args_kw) + kw + token.type)
-            self.add_unique_rule(rule, token.type, uniq_param, customize)
+                    ('kwarg ' * args_kw) + kw + token.kind)
+            self.add_unique_rule(rule, token.kind, uniq_param, customize)
+        if self.version >= 3.6 and opname == 'CALL_FUNCTION_EX_KW':
+            rule = ('call_function36 ::= '
+                    'expr build_tuple_unpack_with_call build_map_unpack_with_call '
+                    'CALL_FUNCTION_EX_KW_1')
+            self.add_unique_rule(rule, token.kind, uniq_param, customize)
+            rule = 'call_function ::= call_function36'
         else:
             rule = ('call_function ::= expr ' +
                     ('pos_arg ' * args_pos) +
                     ('kwarg ' * args_kw) +
-                    'expr ' * nak + token.type)
+                    'expr ' * nak + token.kind)

-        self.add_unique_rule(rule, token.type, uniq_param, customize)
+        self.add_unique_rule(rule, token.kind, uniq_param, customize)
         if self.version >= 3.5:
             rule = ('async_call_function ::= expr ' +
                     ('pos_arg ' * args_pos) +
                     ('kwarg ' * args_kw) +
-                    'expr ' * nak + token.type +
+                    'expr ' * nak + token.kind +
                     ' GET_AWAITABLE LOAD_CONST YIELD_FROM')
-            self.add_unique_rule(rule, token.type, uniq_param, customize)
-            self.add_unique_rule('expr ::= async_call_function', token.type, uniq_param, customize)
+            self.add_unique_rule(rule, token.kind, uniq_param, customize)
+            self.add_unique_rule('expr ::= async_call_function', token.kind, uniq_param, customize)

         rule = ('classdefdeco2 ::= LOAD_BUILD_CLASS mkfunc %s%s_%d'
                 % (('expr ' * (args_pos-1)), opname, args_pos))
-        self.add_unique_rule(rule, token.type, uniq_param, customize)
+        self.add_unique_rule(rule, token.kind, uniq_param, customize)

     def add_make_function_rule(self, rule, opname, attr, customize):
         """Python 3.3 added an additional LOAD_CONST before MAKE_FUNCTION and
@@ -600,7 +614,7 @@ class Python3Parser(PythonParser):
           call_function ::= expr CALL_METHOD
         """
         for i, token in enumerate(tokens):
-            opname = token.type
+            opname = token.kind
             opname_base = opname[:opname.rfind('_')]

             if opname == 'PyPy':
@@ -611,9 +625,9 @@ class Python3Parser(PythonParser):
                     assign2_pypy ::= expr expr designator designator
                     """, nop_func)
                 continue
-            elif opname in ('CALL_FUNCTION', 'CALL_FUNCTION_VAR',
-                            'CALL_FUNCTION_VAR_KW') \
-                    or opname.startswith('CALL_FUNCTION_KW'):
+            elif (opname in ('CALL_FUNCTION', 'CALL_FUNCTION_VAR',
+                             'CALL_FUNCTION_VAR_KW', 'CALL_FUNCTION_EX_KW')
+                  or opname.startswith('CALL_FUNCTION_KW')):
                 self.custom_classfunc_rule(opname, token, customize)
             elif opname == 'LOAD_DICTCOMP':
                 rule_pat = ("dictcomp ::= LOAD_DICTCOMP %sMAKE_FUNCTION_0 expr "
@@ -634,6 +648,18 @@ class Python3Parser(PythonParser):
                 self.add_unique_rule(rule, opname, token.attr, customize)
                 rule = 'expr ::= build_list_unpack'
                 self.add_unique_rule(rule, opname, token.attr, customize)
+            elif opname.startswith('BUILD_TUPLE_UNPACK_WITH_CALL'):
+                v = token.attr
+                rule = ('build_tuple_unpack_with_call ::= ' + 'expr1024 ' * int(v//1024) +
+                        'expr32 ' * int((v//32) % 32) +
+                        'expr ' * (v % 32) + opname)
+                self.add_unique_rule(rule, opname, token.attr, customize)
+            elif opname.startswith('BUILD_MAP_UNPACK_WITH_CALL'):
+                v = token.attr
+                rule = ('build_map_unpack_with_call ::= ' + 'expr1024 ' * int(v//1024) +
+                        'expr32 ' * int((v//32) % 32) +
+                        'expr ' * (v % 32) + opname)
+                self.add_unique_rule(rule, opname, token.attr, customize)
             elif opname_base in ('BUILD_LIST', 'BUILD_TUPLE', 'BUILD_SET'):
                 v = token.attr
                 rule = ('build_list ::= ' + 'expr1024 ' * int(v//1024) +
@@ -643,7 +669,10 @@ class Python3Parser(PythonParser):
                 if opname_base == 'BUILD_TUPLE':
                     rule = ('load_closure ::= %s%s' % (('LOAD_CLOSURE ' * v), opname))
                     self.add_unique_rule(rule, opname, token.attr, customize)
+                    rule = ('build_tuple ::= ' + 'expr1024 ' * int(v//1024) +
+                            'expr32 ' * int((v//32) % 32) +
+                            'expr ' * (v % 32) + opname)
+                    self.add_unique_rule(rule, opname, token.attr, customize)
             elif opname == 'LOOKUP_METHOD':
                 # A PyPy speciality - DRY with parse2
                 self.add_unique_rule("load_attr ::= expr LOOKUP_METHOD",
@@ -869,8 +898,11 @@ class Python3Parser(PythonParser):
         elif lhs == 'annotate_tuple':
             return not isinstance(tokens[first].attr, tuple)
         elif lhs == 'kwarg':
-            return not (isinstance(tokens[first].attr, unicode) or
-                        isinstance(tokens[first].attr, str))
+            arg = tokens[first].attr
+            if PYTHON3:
+                return not isinstance(arg, str)
+            else:
+                return not (isinstance(arg, str) or isinstance(arg, unicode))
         elif lhs == 'while1elsestmt':
             # if SETUP_LOOP target spans the else part, then this is
             # not while1else. Also do for whileTrue?
@@ -879,7 +911,8 @@ class Python3Parser(PythonParser):
             last += 1
             return tokens[first].attr == tokens[last].offset
         elif lhs == 'while1stmt':
-            if tokens[last] in ('COME_FROM_LOOP', 'JUMP_BACK'):
+            if (0 <= last < len(tokens)
+                and tokens[last] in ('COME_FROM_LOOP', 'JUMP_BACK')):
                 # jump_back should be right after SETUP_LOOP. Test?
                 last += 1
                 while last < len(tokens) and isinstance(tokens[last].offset, str):
@@ -923,10 +956,10 @@ def info(args):
         p = Python32Parser()
     elif arg == '3.0':
         p = Python30Parser()
-    p.checkGrammar()
+    p.check_grammar()
     if len(sys.argv) > 1 and sys.argv[1] == 'dump':
         print('-' * 50)
-        p.dumpGrammar()
+        p.dump_grammar()

 if __name__ == '__main__':
     import sys
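The BUILD_*_UNPACK_WITH_CALL rules above decompose the operand count into expr1024/expr32/expr chunks; a worked example for v = 70:

    v = 70
    assert int(v // 1024) == 0       # no 'expr1024' groups
    assert int((v // 32) % 32) == 2  # two 'expr32' groups
    assert v % 32 == 6               # six trailing 'expr' items
    # 0*1024 + 2*32 + 6 == 70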
@@ -42,7 +42,7 @@ class Python32Parser(Python3Parser):
     def add_custom_rules(self, tokens, customize):
         super(Python32Parser, self).add_custom_rules(tokens, customize)
         for i, token in enumerate(tokens):
-            opname = token.type
+            opname = token.kind
             if opname.startswith('MAKE_FUNCTION_A'):
                 args_pos, args_kw, annotate_args = token.attr
                 # Check that there are 2 annotated params?
@@ -29,10 +29,10 @@ class Python34ParserSingle(Python34Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python34Parser()
-    p.checkGrammar()
+    p.check_grammar()
     from uncompyle6 import PYTHON_VERSION, IS_PYPY
     if PYTHON_VERSION == 3.4:
-        lhs, rhs, tokens, right_recursive = p.checkSets()
+        lhs, rhs, tokens, right_recursive = p.check_sets()
         from uncompyle6.scanner import get_scanner
         s = get_scanner(PYTHON_VERSION, IS_PYPY)
         opcode_set = set(s.opc.opname).union(set(
@@ -142,7 +142,7 @@ class Python35Parser(Python34Parser):
     def add_custom_rules(self, tokens, customize):
         super(Python35Parser, self).add_custom_rules(tokens, customize)
         for i, token in enumerate(tokens):
-            opname = token.type
+            opname = token.kind
             if opname == 'BUILD_MAP_UNPACK_WITH_CALL':
                 nargs = token.attr % 256
                 map_unpack_n = "map_unpack_%s" % nargs
@@ -152,7 +152,7 @@ class Python35Parser(Python34Parser):
                 self.add_unique_rule(rule, opname, token.attr, customize)
                 call_token = tokens[i+1]
                 if self.version == 3.5:
-                    rule = 'call_function ::= expr unmapexpr ' + call_token.type
+                    rule = 'call_function ::= expr unmapexpr ' + call_token.kind
                     self.add_unique_rule(rule, opname, token.attr, customize)
                     pass
                 pass
@@ -164,10 +164,10 @@ class Python35ParserSingle(Python35Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python35Parser()
-    p.checkGrammar()
+    p.check_grammar()
     from uncompyle6 import PYTHON_VERSION, IS_PYPY
     if PYTHON_VERSION == 3.5:
-        lhs, rhs, tokens, right_recursive = p.checkSets()
+        lhs, rhs, tokens, right_recursive = p.check_sets()
         from uncompyle6.scanner import get_scanner
         s = get_scanner(PYTHON_VERSION, IS_PYPY)
         opcode_set = set(s.opc.opname).union(set(
@@ -24,18 +24,19 @@ class Python36Parser(Python35Parser):

        func_args36 ::= expr BUILD_TUPLE_0
-       call_function ::= func_args36 unmapexpr CALL_FUNCTION_EX
+       call_function ::= func_args36 build_map_unpack_with_call CALL_FUNCTION_EX_KW_1

        withstmt ::= expr SETUP_WITH POP_TOP suite_stmts_opt POP_BLOCK LOAD_CONST
                     WITH_CLEANUP_START WITH_CLEANUP_FINISH END_FINALLY

        call_function ::= expr expr CALL_FUNCTION_EX
        call_function ::= expr expr expr CALL_FUNCTION_EX_KW
+       call_function ::= expr expr expr CALL_FUNCTION_EX_KW_1
        """

    def add_custom_rules(self, tokens, customize):
        super(Python36Parser, self).add_custom_rules(tokens, customize)
        for i, token in enumerate(tokens):
-            opname = token.type
+            opname = token.kind

            if opname == 'FORMAT_VALUE':
                rules_str = """
@@ -63,10 +64,10 @@ class Python36Parser(Python35Parser):

            if opname.startswith('CALL_FUNCTION_KW'):
                values = 'expr ' * token.attr
-                rule = 'call_function ::= expr kwargs_only_36 {token.type}'.format(**locals())
-                self.add_unique_rule(rule, token.type, token.attr, customize)
+                rule = 'call_function ::= expr kwargs_only_36 {token.kind}'.format(**locals())
+                self.add_unique_rule(rule, token.kind, token.attr, customize)
                rule = 'kwargs_only_36 ::= {values} LOAD_CONST'.format(**locals())
-                self.add_unique_rule(rule, token.type, token.attr, customize)
+                self.add_unique_rule(rule, token.kind, token.attr, customize)
            else:
                super(Python36Parser, self).custom_classfunc_rule(opname, token, customize)

@@ -77,10 +78,10 @@ class Python36ParserSingle(Python36Parser, PythonParserSingle):
 if __name__ == '__main__':
     # Check grammar
     p = Python36Parser()
-    p.checkGrammar()
+    p.check_grammar()
     from uncompyle6 import PYTHON_VERSION, IS_PYPY
     if PYTHON_VERSION == 3.6:
-        lhs, rhs, tokens, right_recursive = p.checkSets()
+        lhs, rhs, tokens, right_recursive = p.check_sets()
         from uncompyle6.scanner import get_scanner
         s = get_scanner(PYTHON_VERSION, IS_PYPY)
         opcode_set = set(s.opc.opname).union(set(
uncompyle6/parsers/parse37.py (new file, 41 lines)
@@ -0,0 +1,41 @@
+# Copyright (c) 2017 Rocky Bernstein
+"""
+spark grammar differences over Python 3.6 for Python 3.7
+"""
+from __future__ import print_function
+
+from uncompyle6.parser import PythonParserSingle
+from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
+from uncompyle6.parsers.parse36 import Python36Parser
+
+class Python37Parser(Python36Parser):
+
+    def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
+        super(Python37Parser, self).__init__(debug_parser)
+        self.customized = {}
+
+
+class Python37ParserSingle(Python37Parser, PythonParserSingle):
+    pass
+
+if __name__ == '__main__':
+    # Check grammar
+    p = Python37Parser()
+    p.check_grammar()
+    from uncompyle6 import PYTHON_VERSION, IS_PYPY
+    if PYTHON_VERSION == 3.7:
+        lhs, rhs, tokens, right_recursive = p.check_sets()
+        from uncompyle6.scanner import get_scanner
+        s = get_scanner(PYTHON_VERSION, IS_PYPY)
+        opcode_set = set(s.opc.opname).union(set(
+            """JUMP_BACK CONTINUE RETURN_END_IF COME_FROM
+               LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP LOAD_CLASSNAME
+               LAMBDA_MARKER RETURN_LAST
+            """.split()))
+        remain_tokens = set(tokens) - opcode_set
+        import re
+        remain_tokens = set([re.sub('_\d+$', '', t) for t in remain_tokens])
+        remain_tokens = set([re.sub('_CONT$', '', t) for t in remain_tokens])
+        remain_tokens = set(remain_tokens) - opcode_set
+        print(remain_tokens)
+        # print(sorted(p.rule2name.items()))
@@ -1,4 +1,4 @@
-# Copyright (c) 2016 by Rocky Bernstein
+# Copyright (c) 2016-2017 by Rocky Bernstein
 # Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
 # Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
 # Copyright (c) 1999 John Aycock
@@ -14,11 +14,13 @@ import sys

 from uncompyle6 import PYTHON3, IS_PYPY
 from uncompyle6.scanners.tok import Token
+from xdis.bytecode import op_size
+from xdis.magics import py_str2float

 # The byte code versions we support
 PYTHON_VERSIONS = (1.5,
                    2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 2.7,
-                   3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6)
+                   3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6, 3.7)

 # FIXME: DRY
 if PYTHON3:
@@ -52,7 +54,7 @@ class Scanner(object):

         if version in PYTHON_VERSIONS:
             if is_pypy:
-                v_str = "opcode_pypy%s" % (int(version * 10))
+                v_str = "opcode_%spypy" % (int(version * 10))
             else:
                 v_str = "opcode_%s" % (int(version * 10))
             exec("from xdis.opcodes import %s" % v_str)
@@ -61,6 +63,7 @@ class Scanner(object):
             raise TypeError("%s is not a Python version I know about" % version)

         self.opname = self.opc.opname
+
         # FIXME: This weird Python2 behavior is not Python3
         self.resetTokenClass()

@@ -86,7 +89,7 @@ class Scanner(object):
         if op is None:
             op = self.code[pos]
         target = self.get_argument(pos)
-        if op in self.opc.hasjrel:
+        if op in self.opc.JREL_OPS:
             target += pos + 3
         return target

@@ -97,7 +100,7 @@ class Scanner(object):
     def print_bytecode(self):
         for i in self.op_range(0, len(self.code)):
             op = self.code[i]
-            if op in self.opc.hasjabs+self.opc.hasjrel:
+            if op in self.JUMP_OPS:
                 dest = self.get_target(i, op)
                 print('%i\t%s\t%i' % (i, self.opname[op], dest))
             else:
@@ -212,9 +215,6 @@ class Scanner(object):
             result.append(offset)
         return result

-    def op_hasArgument(self, op):
-        return self.op_size(op) > 1
-
     def op_range(self, start, end):
         """
         Iterate through positions of opcodes, skipping
@@ -222,26 +222,7 @@ class Scanner(object):
         """
         while start < end:
             yield start
-            start += self.op_size(self.code[start])
-
-    def next_offset(self, op, offset):
-        return offset + self.op_size(op)
-
-    def op_size(self, op):
-        """
-        Return size of operator with its arguments
-        for given opcode <op>.
-        """
-        if op < self.opc.HAVE_ARGUMENT:
-            if self.version >= 3.6:
-                return 2
-            else:
-                return 1
-        else:
-            if self.version >= 3.6:
-                return 2
-            else:
-                return 3
+            start += op_size(self.code[start], self.opc)

     def remove_mid_line_ifs(self, ifs):
         """
@@ -273,13 +254,16 @@ class Scanner(object):
         self.Token = tokenClass
         return self.Token

-def op_has_argument(op, opc):
-    return op >= opc.HAVE_ARGUMENT
-
 def parse_fn_counts(argc):
     return ((argc & 0xFF), (argc >> 8) & 0xFF, (argc >> 16) & 0x7FFF)


 def get_scanner(version, is_pypy=False, show_asm=None):

+    # If version is a string, turn that into the corresponding float.
+    if isinstance(version, str):
+        version = py_str2float(version)
+
     # Pick up appropriate scanner
     if version in PYTHON_VERSIONS:
         v_str = "%s" % (int(version * 10))
@@ -306,5 +290,6 @@ def get_scanner(version, is_pypy=False, show_asm=None):
 if __name__ == "__main__":
     import inspect, uncompyle6
     co = inspect.currentframe().f_code
-    scanner = get_scanner('2.7.13', True)
+    scanner = get_scanner(uncompyle6.PYTHON_VERSION, IS_PYPY, True)
     tokens, customize = scanner.ingest(co, {})
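parse_fn_counts() unpacks the composite argument of MAKE_FUNCTION-style opcodes: the low byte is the positional-default count, the next byte the keyword-default count, and the upper bits the annotation count. A worked example:

    from uncompyle6.scanner import parse_fn_counts
    # 0x10203: annotate=1, keyword defaults=2, positional defaults=3
    assert parse_fn_counts(0x10203) == (3, 2, 1)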
@@ -1,4 +1,4 @@
-# Copyright (c) 2016 by Rocky Bernstein
+# Copyright (c) 2016-2017 by Rocky Bernstein
 """
 Python PyPy 2.7 bytecode scanner/deparser

@@ -10,8 +10,8 @@ information for later use in deparsing.
 import uncompyle6.scanners.scanner27 as scan

 # bytecode verification, verify(), uses JUMP_OPs from here
-from xdis.opcodes import opcode_pypy27
-JUMP_OPs = opcode_pypy27.JUMP_OPs
+from xdis.opcodes import opcode_27pypy
+JUMP_OPS = opcode_27pypy.JUMP_OPS

 # We base this off of 2.6 instead of the other way around
 # because we cleaned things up this way.
@@ -1,22 +1,18 @@
-# Copyright (c) 2016 by Rocky Bernstein
+# Copyright (c) 2017 by Rocky Bernstein
 """
-Python PyPy 3.2 bytecode scanner/deparser
+Python PyPy 3.2 decompiler scanner.

-This overlaps Python's 3.2's dis module, but it can be run from
-Python 3 and other versions of Python. Also, we save token
-information for later use in deparsing.
+Does some additional massaging of xdis-disassembled instructions to
+make things easier for decompilation.
 """

 import uncompyle6.scanners.scanner32 as scan

 # bytecode verification, verify(), uses JUMP_OPs from here
-from xdis.opcodes import opcode_32 as opc # is this rgith?
+from xdis.opcodes import opcode_32 as opc # is this right?
 JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)

-# We base this off of 2.6 instead of the other way around
-# because we cleaned things up this way.
-# The history is that 2.7 support is the cleanest,
-# then from that we got 2.6 and so on.
+# We base this off of 3.2
 class ScannerPyPy32(scan.Scanner32):
     def __init__(self, show_asm):
         # There are no differences in initialization between
uncompyle6/scanners/pypy35.py (new file, 22 lines)
@@ -0,0 +1,22 @@
+# Copyright (c) 2017 by Rocky Bernstein
+"""
+Python PyPy 3.5 decompiler scanner.
+
+Does some additional massaging of xdis-disassembled instructions to
+make things easier for decompilation.
+"""
+
+import uncompyle6.scanners.scanner35 as scan
+
+# bytecode verification, verify(), uses JUMP_OPS from here
+from xdis.opcodes import opcode_35 as opc  # is this right?
+JUMP_OPs = opc.JUMP_OPS
+
+# We base this off of 3.5
+class ScannerPyPy35(scan.Scanner35):
+    def __init__(self, show_asm):
+        # There are no differences in initialization between
+        # pypy 3.5 and 3.5
+        scan.Scanner35.__init__(self, show_asm, is_pypy=True)
+        self.version = 3.5
+        return
@@ -1,6 +1,6 @@
-# Copyright (c) 2016 by Rocky Bernstein
+# Copyright (c) 2016-2017 by Rocky Bernstein
 """
-Python 1.5 bytecode scanner/deparser
+Python 1.5 bytecode decompiler scanner.

 This massages tokenized 1.5 bytecode to make it more amenable for
 grammar parsing.
@@ -11,7 +11,7 @@ import uncompyle6.scanners.scanner21 as scan

 # bytecode verification, verify(), uses JUMP_OPs from here
 from xdis.opcodes import opcode_15
-JUMP_OPs = opcode_15.JUMP_OPs
+JUMP_OPS = opcode_15.JUMP_OPS

 # We base this off of 2.1 instead of the other way around
 # because we cleaned things up this way.
@@ -23,20 +23,21 @@ Finally we save token information.
 from uncompyle6 import PYTHON_VERSION

 if PYTHON_VERSION < 2.6:
-    from xdis.namedtuple25 import namedtuple
+    from xdis.namedtuple24 import namedtuple
 else:
     from collections import namedtuple

 from array import array

-from uncompyle6.scanner import op_has_argument
+from uncompyle6.scanner import L65536
 from xdis.code import iscode
+from xdis.bytecode import op_has_argument, op_size

-import uncompyle6.scanner as scan
+from uncompyle6.scanner import Scanner

-class Scanner2(scan.Scanner):
+class Scanner2(Scanner):
     def __init__(self, version, show_asm=None, is_pypy=False):
-        scan.Scanner.__init__(self, version, show_asm, is_pypy)
+        Scanner.__init__(self, version, show_asm, is_pypy)
         self.pop_jump_if = frozenset([self.opc.PJIF, self.opc.PJIT])
         self.jump_forward = frozenset([self.opc.JUMP_ABSOLUTE, self.opc.JUMP_FORWARD])
         # This is the 2.5+ default
@@ -98,12 +99,18 @@ class Scanner2(scan.Scanner):
         for instr in bytecode.get_instructions(co):
             print(instr._disassemble())

-        # Container for tokens
+        # list of tokens/instructions
         tokens = []

         # "customize" is a dict whose keys are nonterminals
+        # and the value is the argument stack entries for that
+        # nonterminal. The count is a little hokey. It is mostly
+        # not used, but sometimes it is.
         customize = {}

         if self.is_pypy:
-            customize['PyPy'] = 1
+            customize['PyPy'] = 0

         Token = self.Token # shortcut

@@ -192,9 +199,9 @@ class Scanner2(scan.Scanner):
             oparg = self.get_argument(offset) + extended_arg
             extended_arg = 0
             if op == self.opc.EXTENDED_ARG:
-                extended_arg = oparg * scan.L65536
+                extended_arg = oparg * L65536
                 continue
-            if op in self.opc.hasconst:
+            if op in self.opc.CONST_OPS:
                 const = co.co_consts[oparg]
                 if iscode(const):
                     oparg = const
@@ -215,23 +222,23 @@ class Scanner2(scan.Scanner):
                     pattr = '<code_object ' + const.co_name + '>'
                 else:
                     pattr = const
-            elif op in self.opc.hasname:
+            elif op in self.opc.NAME_OPS:
                 pattr = names[oparg]
-            elif op in self.opc.hasjrel:
+            elif op in self.opc.JREL_OPS:
                 # use instead: hasattr(self, 'patch_continue'): ?
                 if self.version == 2.7:
                     self.patch_continue(tokens, offset, op)
                 pattr = repr(offset + 3 + oparg)
-            elif op in self.opc.hasjabs:
+            elif op in self.opc.JABS_OPS:
                 # use instead: hasattr(self, 'patch_continue'): ?
                 if self.version == 2.7:
                     self.patch_continue(tokens, offset, op)
                 pattr = repr(oparg)
-            elif op in self.opc.haslocal:
+            elif op in self.opc.LOCAL_OPS:
                 pattr = varnames[oparg]
-            elif op in self.opc.hascompare:
+            elif op in self.opc.COMPARE_OPS:
                 pattr = self.opc.cmp_op[oparg]
-            elif op in self.opc.hasfree:
+            elif op in self.opc.FREE_OPS:
                 pattr = free[oparg]

             if op in self.varargs_ops:
@@ -333,7 +340,7 @@ class Scanner2(scan.Scanner):
         for i in self.op_range(0, n):
             op = self.code[i]
             self.prev.append(i)
-            if self.op_hasArgument(op):
+            if op_has_argument(op, self.opc):
                 self.prev.append(i)
                 self.prev.append(i)
                 pass
@@ -386,7 +393,7 @@ class Scanner2(scan.Scanner):
                 if elem != code[i]:
                     match = False
                     break
-                i += self.op_size(code[i])
+                i += op_size(code[i], self.opc)

             if match:
                 i = self.prev[i]
@@ -457,7 +464,7 @@ class Scanner2(scan.Scanner):
                     self.not_continue.add(jmp)
                 jmp = self.get_target(jmp)
                 prev_offset = self.prev[except_match]
-                # COMPARE_OP argument should be "exception match" or 10
+                # COMPARE_OP argument should be "exception-match" or 10
                 if (self.code[prev_offset] == self.opc.COMPARE_OP and
                     self.code[prev_offset+1] != 10):
                     return None
@@ -608,7 +615,7 @@ class Scanner2(scan.Scanner):

             if test == offset:
                 loop_type = 'while 1'
-            elif self.code[test] in self.opc.hasjabs + self.opc.hasjrel:
+            elif self.code[test] in self.opc.JUMP_OPs:
                 self.ignore_if.add(test)
                 test_target = self.get_target(test)
                 if test_target > (jump_back+3):
@@ -623,7 +630,7 @@ class Scanner2(scan.Scanner):
                               'start': jump_back+3,
                               'end':   end})
         elif op == self.opc.SETUP_EXCEPT:
-            start = offset + self.op_size(op)
+            start = offset + op_size(op, self.opc)
             target = self.get_target(offset, op)
             end = self.restrict_to_parent(target, parent)
             if target != end:
@@ -647,7 +654,7 @@ class Scanner2(scan.Scanner):
                     setup_except_nest -= 1
                 elif self.code[end_finally_offset] == self.opc.SETUP_EXCEPT:
                     setup_except_nest += 1
-                end_finally_offset += self.op_size(code[end_finally_offset])
+                end_finally_offset += op_size(code[end_finally_offset], self.opc)
                 pass

             # Add the except blocks
@@ -848,7 +855,7 @@ class Scanner2(scan.Scanner):
             else:
                 # We still have the case in 2.7 that the next instruction
                 # is a jump to a SETUP_LOOP target.
-                next_offset = target + self.op_size(self.code[target])
+                next_offset = target + op_size(self.code[target], self.opc)
                 next_op = self.code[next_offset]
                 if self.op_name(next_op) == 'JUMP_FORWARD':
                     jump_target = self.get_target(next_offset, next_op)
@@ -910,7 +917,9 @@ class Scanner2(scan.Scanner):
                           'start': start-3,
                           'end':   pre_rtarget})

-            self.not_continue.add(pre_rtarget)
+            # FIXME: this is yet another case where we need dominators.
+            if pre_rtarget not in self.linestartoffsets or self.version < 2.7:
+                self.not_continue.add(pre_rtarget)

             if rtarget < end:
                 # We have an "else" block of some kind.
@@ -997,11 +1006,11 @@ class Scanner2(scan.Scanner):
                 oparg = self.get_argument(offset)

             if label is None:
-                if op in self.opc.hasjrel and self.op_name(op) != 'FOR_ITER':
-                    # if (op in self.opc.hasjrel and
+                if op in self.opc.JREL_OPS and self.op_name(op) != 'FOR_ITER':
+                    # if (op in self.opc.JREL_OPS and
                     #    (self.version < 2.0 or op != self.opc.FOR_ITER)):
                     label = offset + 3 + oparg
-                elif self.version == 2.7 and op in self.opc.hasjabs:
+                elif self.version == 2.7 and op in self.opc.JABS_OPS:
                     if op in (self.opc.JUMP_IF_FALSE_OR_POP,
                               self.opc.JUMP_IF_TRUE_OR_POP):
                         if (oparg > offset):
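For reference, EXTENDED_ARG in pre-3.6 bytecode supplies the high 16 bits of the next instruction's argument, which is why the scanner multiplies by L65536 (2**16):

    L65536 = 65536
    high, low = 0x12, 0x3456
    # oparg = low_arg + extended_arg, where extended_arg = high_arg * 65536
    assert high * L65536 + low == 0x123456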
@@ -1,4 +1,4 @@
-# Copyright (c) 2016 by Rocky Bernstein
+# Copyright (c) 2016-2017 by Rocky Bernstein
 """
 Python 2.1 bytecode scanner/deparser

@@ -11,7 +11,7 @@ import uncompyle6.scanners.scanner22 as scan

 # bytecode verification, verify(), uses JUMP_OPs from here
 from xdis.opcodes import opcode_21
-JUMP_OPs = opcode_21.JUMP_OPs
+JUMP_OPS = opcode_21.JUMP_OPS

 # We base this off of 2.2 instead of the other way around
 # because we cleaned things up this way.
@@ -1,4 +1,4 @@
-# Copyright (c) 2016 by Rocky Bernstein
+# Copyright (c) 2016-2017 by Rocky Bernstein
 """
 Python 2.2 bytecode ingester.

@@ -11,7 +11,7 @@ import uncompyle6.scanners.scanner23 as scan

 # bytecode verification, verify(), uses JUMP_OPs from here
 from xdis.opcodes import opcode_22
-JUMP_OPs = opcode_22.JUMP_OPs
+JUMP_OPS = opcode_22.JUMP_OPS

 # We base this off of 2.3 instead of the other way around
 # because we cleaned things up this way.
@@ -30,5 +30,5 @@ class Scanner22(scan.Scanner23):

     def ingest22(self, co, classname=None, code_objects={}, show_asm=None):
         tokens, customize = self.parent_ingest(co, classname, code_objects, show_asm)
-        tokens = [t for t in tokens if t.type != 'SET_LINENO']
+        tokens = [t for t in tokens if t.kind != 'SET_LINENO']
         return tokens, customize
@@ -1,4 +1,4 @@
-# Copyright (c) 2016 by Rocky Bernstein
+# Copyright (c) 2016-2017 by Rocky Bernstein
 """
 Python 2.3 bytecode scanner/deparser

@@ -10,7 +10,7 @@ import uncompyle6.scanners.scanner24 as scan

 # bytecode verification, verify(), uses JUMP_OPs from here
 from xdis.opcodes import opcode_23
-JUMP_OPs = opcode_23.JUMP_OPs
+JUMP_OPS = opcode_23.JUMP_OPS

 # We base this off of 2.4 instead of the other way around
 # because we cleaned things up this way.
@@ -1,4 +1,4 @@
-# Copyright (c) 2016 by Rocky Bernstein
+# Copyright (c) 2016-2017 by Rocky Bernstein
 """
 Python 2.4 bytecode scanner/deparser

@@ -10,7 +10,7 @@ import uncompyle6.scanners.scanner25 as scan

 # bytecode verification, verify(), uses JUMP_OPs from here
 from xdis.opcodes import opcode_24
-JUMP_OPs = opcode_24.JUMP_OPs
+JUMP_OPS = opcode_24.JUMP_OPS

 # We base this off of 2.5 instead of the other way around
 # because we cleaned things up this way.
@@ -1,4 +1,4 @@
-# Copyright (c) 2015-2016 by Rocky Bernstein
+# Copyright (c) 2015-2017 by Rocky Bernstein
 """
 Python 2.5 bytecode scanner/deparser

@@ -11,7 +11,7 @@ import uncompyle6.scanners.scanner26 as scan

 # bytecode verification, verify(), uses JUMP_OPs from here
 from xdis.opcodes import opcode_25
-JUMP_OPs = opcode_25.JUMP_OPs
+JUMP_OPS = opcode_25.JUMP_OPS

 # We base this off of 2.6 instead of the other way around
 # because we cleaned things up this way.
uncompyle6/scanners/scanner26.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2015, 2016 by Rocky Bernstein
+# Copyright (c) 2015-2017 by Rocky Bernstein
 # Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
 # Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
 """
@@ -15,10 +15,11 @@ if PYTHON3:
     intern = sys.intern

 import uncompyle6.scanners.scanner2 as scan
+from uncompyle6.scanner import L65536

 # bytecode verification, verify(), uses JUMP_OPs from here
 from xdis.opcodes import opcode_26
-JUMP_OPs = opcode_26.JUMP_OPs
+JUMP_OPS = opcode_26.JUMP_OPS

 class Scanner26(scan.Scanner2):
     def __init__(self, show_asm=False):
@@ -180,9 +181,9 @@ class Scanner26(scan.Scanner2):
             oparg = self.get_argument(offset) + extended_arg
             extended_arg = 0
             if op == self.opc.EXTENDED_ARG:
-                extended_arg = oparg * scan.L65536
+                extended_arg = oparg * L65536
                 continue
-            if op in self.opc.hasconst:
+            if op in self.opc.CONST_OPS:
                 const = co.co_consts[oparg]
                 # We can't use inspect.iscode() because we may be
                 # using a different version of Python than the
@@ -207,25 +208,25 @@ class Scanner26(scan.Scanner2):
                     pattr = '<code_object ' + const.co_name + '>'
                 else:
                     pattr = const
-            elif op in self.opc.hasname:
+            elif op in self.opc.NAME_OPS:
                 pattr = names[oparg]
-            elif op in self.opc.hasjrel:
+            elif op in self.opc.JREL_OPS:
                 pattr = repr(offset + 3 + oparg)
                 if op == self.opc.JUMP_FORWARD:
                     target = self.get_target(offset)
                     # FIXME: this is a hack to catch stuff like:
                     #   if x: continue
                     # the "continue" is not on a new line.
-                    if len(tokens) and tokens[-1].type == 'JUMP_BACK':
-                        tokens[-1].type = intern('CONTINUE')
+                    if len(tokens) and tokens[-1].kind == 'JUMP_BACK':
+                        tokens[-1].kind = intern('CONTINUE')

-            elif op in self.opc.hasjabs:
+            elif op in self.opc.JABS_OPS:
                 pattr = repr(oparg)
-            elif op in self.opc.haslocal:
+            elif op in self.opc.LOCAL_OPS:
                 pattr = varnames[oparg]
-            elif op in self.opc.hascompare:
+            elif op in self.opc.COMPARE_OPS:
                 pattr = self.opc.cmp_op[oparg]
-            elif op in self.opc.hasfree:
+            elif op in self.opc.FREE_OPS:
                 pattr = free[oparg]
             if op in self.varargs_ops:
                 # CE - Hack for >= 2.5
@@ -257,18 +258,18 @@ class Scanner26(scan.Scanner2):
                     and self.code[offset+3] not in (self.opc.END_FINALLY,
                                                     self.opc.POP_BLOCK)):
                 if ((offset in self.linestartoffsets and
-                     tokens[-1].type == 'JUMP_BACK')
+                     tokens[-1].kind == 'JUMP_BACK')
                     or offset not in self.not_continue):
                     op_name = 'CONTINUE'
                 else:
                     # FIXME: this is a hack to catch stuff like:
                     #   if x: continue
                     # the "continue" is not on a new line.
                     if tokens[-1].type == 'JUMP_BACK':
+                    if tokens[-1].kind == 'JUMP_BACK':
                         # We need 'intern' since we have
                         # already processed the previous
                         # token.
-                        tokens[-1].type = intern('CONTINUE')
+                        tokens[-1].kind = intern('CONTINUE')

             elif op == self.opc.LOAD_GLOBAL:
                 if offset in self.load_asserts:
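The EXTENDED_ARG arithmetic above is easier to see in isolation. Below is a toy calculation, not uncompyle6 code: in pre-3.6 bytecode an EXTENDED_ARG instruction supplies the high 16 bits of the next instruction's argument, which is why the scanner multiplies its oparg by L65536 (2**16) and carries the product into the following iteration.

    # Toy reconstruction of EXTENDED_ARG widening (16-bit opargs assumed)
    L65536 = 1 << 16

    def widen(low16, extended_arg):
        # extended_arg was set while scanning the preceding EXTENDED_ARG
        return extended_arg + low16

    extended_arg = 2 * L65536           # EXTENDED_ARG with oparg 2
    print(widen(5, extended_arg))       # 131077, i.e. (2 << 16) | 5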
uncompyle6/scanners/scanner27.py
@@ -16,7 +16,7 @@ if PYTHON3:

 # bytecode verification, verify(), uses JUMP_OPs from here
 from xdis.opcodes import opcode_27
-JUMP_OPs = opcode_27.JUMP_OPs
+JUMP_OPS = opcode_27.JUMP_OPs

 class Scanner27(Scanner2):
     def __init__(self, show_asm=False, is_pypy=False):
@@ -92,9 +92,9 @@ class Scanner27(Scanner2):
             # the "continue" is not on a new line.
             n = len(tokens)
             if (n > 2 and
-                tokens[-1].type == 'JUMP_BACK' and
+                tokens[-1].kind == 'JUMP_BACK' and
                 self.code[offset+3] == self.opc.END_FINALLY):
-                tokens[-1].type = intern('CONTINUE')
+                tokens[-1].kind = intern('CONTINUE')

             pass
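The JUMP_BACK-to-CONTINUE retagging that appears in both scanners above exists because of source like the following: when continue shares a line with its test, CPython reuses the loop's closing backward jump for it, so the scanner can only rename the token after the fact.

    # The source shape behind the "if x: continue" hack. This 'continue'
    # compiles to the loop's own JUMP_BACK rather than a separate jump.
    for i in range(10):
        if i % 2: continue
        print(i)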
|
@@ -23,16 +23,17 @@ Finally we save token information.
|
||||
from uncompyle6 import PYTHON_VERSION
|
||||
|
||||
if PYTHON_VERSION < 2.6:
|
||||
from xdis.namedtuple25 import namedtuple
|
||||
from xdis.namedtuple24 import namedtuple
|
||||
else:
|
||||
from collections import namedtuple
|
||||
|
||||
from array import array
|
||||
|
||||
from uncompyle6.scanner import Scanner, op_has_argument
|
||||
from uncompyle6.scanner import Scanner
|
||||
from xdis.code import iscode
|
||||
from xdis.bytecode import Bytecode
|
||||
from xdis.bytecode import Bytecode, op_has_argument, op_size
|
||||
from uncompyle6.scanner import Token, parse_fn_counts
|
||||
import xdis
|
||||
|
||||
# Get all the opcodes into globals
|
||||
import xdis.opcodes.opcode_33 as op3
|
||||
@@ -174,12 +175,16 @@ class Scanner3(Scanner):
|
||||
for instr in bytecode.get_instructions(co):
|
||||
print(instr._disassemble())
|
||||
|
||||
# Container for tokens
|
||||
# list of tokens/instructions
|
||||
tokens = []
|
||||
|
||||
# "customize" is a dict whose keys are nonterminals
|
||||
# and the value is the argument stack entries for that
|
||||
# nonterminal. The count is a little hoaky. It is mostly
|
||||
# not used, but sometimes it is.
|
||||
customize = {}
|
||||
if self.is_pypy:
|
||||
customize['PyPy'] = 1
|
||||
customize['PyPy'] = 0
|
||||
|
||||
self.code = array('B', co.co_code)
|
||||
self.build_lines_data(co)
|
||||
@@ -335,7 +340,7 @@ class Scanner3(Scanner):
|
||||
attr = (pos_args, name_pair_args, annotate_args)
|
||||
tokens.append(
|
||||
Token(
|
||||
type_ = opname,
|
||||
opname = opname,
|
||||
attr = attr,
|
||||
pattr = pattr,
|
||||
offset = inst.offset,
|
||||
@@ -395,12 +400,12 @@ class Scanner3(Scanner):
|
||||
# the "continue" is not on a new line.
|
||||
# There are other situations where we don't catch
|
||||
# CONTINUE as well.
|
||||
if tokens[-1].type == 'JUMP_BACK' and tokens[-1].attr <= argval:
|
||||
if tokens[-2].type == 'BREAK_LOOP':
|
||||
if tokens[-1].kind == 'JUMP_BACK' and tokens[-1].attr <= argval:
|
||||
if tokens[-2].kind == 'BREAK_LOOP':
|
||||
del tokens[-1]
|
||||
else:
|
||||
# intern is used because we are changing the *previous* token
|
||||
tokens[-1].type = intern('CONTINUE')
|
||||
tokens[-1].kind = intern('CONTINUE')
|
||||
if last_op_was_break and opname == 'CONTINUE':
|
||||
last_op_was_break = False
|
||||
continue
|
||||
@@ -413,7 +418,7 @@ class Scanner3(Scanner):
|
||||
last_op_was_break = opname == 'BREAK_LOOP'
|
||||
tokens.append(
|
||||
Token(
|
||||
type_ = opname,
|
||||
opname = opname,
|
||||
attr = argval,
|
||||
pattr = pattr,
|
||||
offset = inst.offset,
|
||||
@@ -474,7 +479,7 @@ class Scanner3(Scanner):
|
||||
self.prev = self.prev_op = [0]
|
||||
for offset in self.op_range(0, codelen):
|
||||
op = code[offset]
|
||||
for _ in range(self.op_size(op)):
|
||||
for _ in range(op_size(op, self.opc)):
|
||||
self.prev_op.append(offset)
|
||||
|
||||
def find_jump_targets(self, debug):
|
||||
@@ -524,7 +529,7 @@ class Scanner3(Scanner):
|
||||
oparg = code[offset+1]
|
||||
else:
|
||||
oparg = code[offset+1] + code[offset+2] * 256
|
||||
next_offset = self.next_offset(op, offset)
|
||||
next_offset = xdis.next_offset(op, self.opc, offset)
|
||||
|
||||
if label is None:
|
||||
if op in op3.hasjrel and op != self.opc.FOR_ITER:
|
||||
@@ -570,7 +575,7 @@ class Scanner3(Scanner):
|
||||
if elem != code[i]:
|
||||
match = False
|
||||
break
|
||||
i += self.op_size(code[i])
|
||||
i += op_size(code[i], self.opc)
|
||||
|
||||
if match is True:
|
||||
i = self.prev_op[i]
|
||||
@@ -638,11 +643,11 @@ class Scanner3(Scanner):
|
||||
rel_offset = 0
|
||||
if self.version >= 3.6:
|
||||
target = self.code[offset+1]
|
||||
if op in self.opc.hasjrel:
|
||||
if op in self.opc.JREL_OPS:
|
||||
rel_offset = offset + 2
|
||||
else:
|
||||
target = self.code[offset+1] + self.code[offset+2] * 256
|
||||
if op in self.opc.hasjrel:
|
||||
if op in self.opc.JREL_OPS:
|
||||
rel_offset = offset + 3
|
||||
pass
|
||||
pass
|
||||
@@ -763,7 +768,7 @@ class Scanner3(Scanner):
|
||||
'start': jump_back+3,
|
||||
'end': end})
|
||||
elif op in self.pop_jump_tf:
|
||||
start = offset + self.op_size(op)
|
||||
start = offset + op_size(op, self.opc)
|
||||
target = self.get_target(offset)
|
||||
rtarget = self.restrict_to_parent(target, parent)
|
||||
prev_op = self.prev_op
|
||||
@@ -926,7 +931,7 @@ class Scanner3(Scanner):
|
||||
# except block return
|
||||
jump_prev = prev_op[offset]
|
||||
if self.is_pypy and code[jump_prev] == self.opc.COMPARE_OP:
|
||||
if self.opc.cmp_op[code[jump_prev+1]] == 'exception match':
|
||||
if self.opc.cmp_op[code[jump_prev+1]] == 'exception-match':
|
||||
return
|
||||
if self.version >= 3.5:
|
||||
# Python 3.5 may remove as dead code a JUMP
|
||||
@@ -938,9 +943,9 @@ class Scanner3(Scanner):
|
||||
# not from SETUP_EXCEPT
|
||||
next_op = rtarget
|
||||
if code[next_op] == self.opc.POP_BLOCK:
|
||||
next_op += self.op_size(self.code[next_op])
|
||||
next_op += op_size(self.code[next_op], self.opc)
|
||||
if code[next_op] == self.opc.JUMP_ABSOLUTE:
|
||||
next_op += self.op_size(self.code[next_op])
|
||||
next_op += op_size(self.code[next_op], self.opc)
|
||||
if next_op in targets:
|
||||
for try_op in targets[next_op]:
|
||||
come_from_op = code[try_op]
|
||||
@@ -948,7 +953,7 @@ class Scanner3(Scanner):
|
||||
return
|
||||
pass
|
||||
pass
|
||||
if code[pre_rtarget] == self.opc.RETURN_VALUE and self.version < 3.5:
|
||||
if code[pre_rtarget] == self.opc.RETURN_VALUE:
|
||||
self.return_end_ifs.add(pre_rtarget)
|
||||
else:
|
||||
self.fixed_jumps[offset] = rtarget
|
||||
@@ -963,12 +968,12 @@ class Scanner3(Scanner):
|
||||
end = self.restrict_to_parent(target, parent)
|
||||
self.fixed_jumps[offset] = end
|
||||
elif op == self.opc.POP_EXCEPT:
|
||||
next_offset = self.next_offset(op, offset)
|
||||
next_offset = xdis.next_offset(op, self.opc, offset)
|
||||
target = self.get_target(next_offset)
|
||||
if target > next_offset:
|
||||
next_op = code[next_offset]
|
||||
if (self.opc.JUMP_ABSOLUTE == next_op and
|
||||
END_FINALLY != code[self.next_offset(next_op, next_offset)]):
|
||||
self.opc.END_FINALLY != code[xdis.next_offset(next_op, self.opc, next_offset)]):
|
||||
self.fixed_jumps[next_offset] = target
|
||||
self.except_targets[target] = next_offset
|
||||
|
||||
@@ -991,7 +996,8 @@ class Scanner3(Scanner):
|
||||
# misclassified as RETURN_END_IF. Handle that here.
|
||||
# In RETURN_VALUE, JUMP_ABSOLUTE, RETURN_VALUE is never RETURN_END_IF
|
||||
if op == self.opc.RETURN_VALUE:
|
||||
if (offset+1 < len(code) and code[offset+1] == self.opc.JUMP_ABSOLUTE and
|
||||
next_offset = xdis.next_offset(op, self.opc, offset)
|
||||
if (next_offset < len(code) and code[next_offset] == self.opc.JUMP_ABSOLUTE and
|
||||
offset in self.return_end_ifs):
|
||||
self.return_end_ifs.remove(offset)
|
||||
pass
|
||||
|
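A recurring mechanical change in this file replaces the scanner's own self.op_size(op) with op_size(op, self.opc) from xdis.bytecode, which consults the opcode table to decide an instruction's width. A rough sketch of what such a helper computes for pre-3.6 bytecode; the HAVE_ARGUMENT threshold is an assumption about xdis's tables, not quoted from them:

    def op_size(op, opc):
        # 1 byte for the opcode, plus 2 operand bytes when it takes one
        return 3 if op >= opc.HAVE_ARGUMENT else 1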
uncompyle6/scanners/scanner30.py
@@ -8,7 +8,9 @@ scanner routine for Python 3.

 # bytecode verification, verify(), uses JUMP_OPs from here
 from xdis.opcodes import opcode_30 as opc
-JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)
+from xdis.bytecode import op_size
+
+JUMP_OPS = opc.JUMP_OPS

 JUMP_TF = frozenset([opc.JUMP_IF_FALSE, opc.JUMP_IF_TRUE])

@@ -116,7 +118,7 @@ class Scanner30(Scanner3):

                 if test == offset:
                     loop_type = 'while 1'
-                elif self.code[test] in opc.hasjabs+opc.hasjrel:
+                elif self.code[test] in opc.JUMP_OPs:
                     self.ignore_if.add(test)
                     test_target = self.get_target(test)
                     if test_target > (jump_back+3):
@@ -131,7 +133,7 @@ class Scanner30(Scanner3):
                              'start': jump_back+3,
                              'end': end})
         elif op in JUMP_TF:
-            start = offset + self.op_size(op)
+            start = offset + op_size(op, self.opc)
             target = self.get_target(offset)
             rtarget = self.restrict_to_parent(target, parent)
             prev_op = self.prev_op
@@ -291,7 +293,7 @@ class Scanner30(Scanner3):
                     # except block return
                     jump_prev = prev_op[offset]
                     if self.is_pypy and code[jump_prev] == self.opc.COMPARE_OP:
-                        if self.opc.cmp_op[code[jump_prev+1]] == 'exception match':
+                        if self.opc.cmp_op[code[jump_prev+1]] == 'exception-match':
                             return
                 if self.version >= 3.5:
                     # Python 3.5 may remove as dead code a JUMP
@@ -303,9 +305,9 @@ class Scanner30(Scanner3):
                     # not from SETUP_EXCEPT
                     next_op = rtarget
                     if code[next_op] == self.opc.POP_BLOCK:
-                        next_op += self.op_size(self.code[next_op])
+                        next_op += op_size(self.code[next_op], self.opc)
                     if code[next_op] == self.opc.JUMP_ABSOLUTE:
-                        next_op += self.op_size(self.code[next_op])
+                        next_op += op_size(self.code[next_op], self.opc)
                     if next_op in targets:
                         for try_op in targets[next_op]:
                             come_from_op = code[try_op]
uncompyle6/scanners/scanner31.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2016 by Rocky Bernstein
+# Copyright (c) 2016-2017 by Rocky Bernstein
 """
 Python 3.1 bytecode scanner/deparser

@@ -8,7 +8,7 @@ scanner routine for Python 3.

 # bytecode verification, verify(), uses JUMP_OPs from here
 from xdis.opcodes import opcode_31 as opc
-JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)
+JUMP_OPS = opc.JUMP_OPS

 from uncompyle6.scanners.scanner3 import Scanner3
 class Scanner31(Scanner3):
uncompyle6/scanners/scanner32.py
@@ -1,6 +1,9 @@
-# Copyright (c) 2015-2016 by Rocky Bernstein
+# Copyright (c) 2015-2017 by Rocky Bernstein
 """
-Python 3.2 bytecode scanner/deparser
+Python 3.2 bytecode decompiler scanner.
+
+Does some additional massaging of xdis-disassembled instructions to
+make things easier for decompilation.

 This sets up opcodes Python's 3.2 and calls a generalized
 scanner routine for Python 3.
@@ -8,7 +11,7 @@ scanner routine for Python 3.

 # bytecode verification, verify(), uses JUMP_OPs from here
 from xdis.opcodes import opcode_32 as opc
-JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)
+JUMP_OPS = opc.JUMP_OPS

 from uncompyle6.scanners.scanner3 import Scanner3
 class Scanner32(Scanner3):
uncompyle6/scanners/scanner33.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2015-2016 by Rocky Bernstein
+# Copyright (c) 2015-2017 by Rocky Bernstein
 """
 Python 3.3 bytecode scanner/deparser

@@ -8,7 +8,7 @@ scanner routine for Python 3.

 # bytecode verification, verify(), uses JUMP_OPs from here
 from xdis.opcodes import opcode_33 as opc
-JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)
+JUMP_OPS = opc.JUMP_OPS

 from uncompyle6.scanners.scanner3 import Scanner3
 class Scanner33(Scanner3):
uncompyle6/scanners/scanner34.py
@@ -1,6 +1,9 @@
-# Copyright (c) 2015-2016 by Rocky Bernstein
+# Copyright (c) 2015-2017 by Rocky Bernstein
 """
-Python 3.4 bytecode scanner/deparser
+Python 3.4 bytecode decompiler scanner
+
+Does some additional massaging of xdis-disassembled instructions to
+make things easier for decompilation.

 This sets up opcodes Python's 3.4 and calls a generalized
 scanner routine for Python 3.
@@ -9,7 +12,7 @@ scanner routine for Python 3.
 from xdis.opcodes import opcode_34 as opc

 # bytecode verification, verify(), uses JUMP_OPs from here
-JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)
+JUMP_OPS = opc.JUMP_OPS


 from uncompyle6.scanners.scanner3 import Scanner3
uncompyle6/scanners/scanner35.py
@@ -1,6 +1,9 @@
-# Copyright (c) 2016 by Rocky Bernstein
+# Copyright (c) 2017 by Rocky Bernstein
 """
-Python 3.5 bytecode scanner/deparser
+Python 3.5 bytecode decompiler scanner
+
+Does some additional massaging of xdis-disassembled instructions to
+make things easier for decompilation.

 This sets up opcodes Python's 3.5 and calls a generalized
 scanner routine for Python 3.
@@ -10,12 +13,12 @@ from uncompyle6.scanners.scanner3 import Scanner3

 # bytecode verification, verify(), uses JUMP_OPs from here
 from xdis.opcodes import opcode_35 as opc
-JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)
+JUMP_OPS = opc.JUMP_OPS

 class Scanner35(Scanner3):

-    def __init__(self, show_asm=None):
-        Scanner3.__init__(self, 3.5, show_asm)
+    def __init__(self, show_asm=None, is_pypy=False):
+        Scanner3.__init__(self, 3.5, show_asm, is_pypy)
         return
     pass
uncompyle6/scanners/scanner36.py
@@ -1,6 +1,9 @@
-# Copyright (c) 2016 by Rocky Bernstein
+# Copyright (c) 2016-2017 by Rocky Bernstein
 """
-Python 3.6 bytecode scanner/deparser
+Python 3.6 bytecode decompiler scanner
+
+Does some additional massaging of xdis-disassembled instructions to
+make things easier for decompilation.

 This sets up opcodes Python's 3.6 and calls a generalized
 scanner routine for Python 3.
@@ -8,9 +11,9 @@ scanner routine for Python 3.

 from uncompyle6.scanners.scanner3 import Scanner3

-# bytecode verification, verify(), uses JUMP_OPs from here
+# bytecode verification, verify(), uses JUMP_OPS from here
 from xdis.opcodes import opcode_36 as opc
-JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)
+JUMP_OPS = opc.JUMP_OPS

 class Scanner36(Scanner3):

@@ -24,10 +27,14 @@ class Scanner36(Scanner3):
         # The lowest bit of flags indicates whether the
         # var-keyword argument is placed at the top of the stack
         if t.op == self.opc.CALL_FUNCTION_EX and t.attr & 1:
-            t.type = 'CALL_FUNCTION_EX_KW'
+            t.kind = 'CALL_FUNCTION_EX_KW'
             pass
-        if t.op == self.opc.CALL_FUNCTION_KW:
-            t.type = 'CALL_FUNCTION_KW_{t.attr}'.format(**locals())
+        elif t.op == self.opc.CALL_FUNCTION_KW:
+            t.kind = 'CALL_FUNCTION_KW_{t.attr}'.format(**locals())
+        elif t.op == self.opc.BUILD_TUPLE_UNPACK_WITH_CALL:
+            t.kind = 'BUILD_TUPLE_UNPACK_WITH_CALL_%d' % t.attr
+        elif t.op == self.opc.BUILD_MAP_UNPACK_WITH_CALL:
+            t.kind = 'BUILD_MAP_UNPACK_WITH_CALL_%d' % t.attr
         pass
     return tokens, customize
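Folding the operand into the token name, as the code above does, lets the grammar match on exact arity. A toy restatement of the renaming, separate from the real method:

    def specialize(opname, attr):
        # mirror Scanner36: bake the operand into the token kind so a
        # grammar rule can target, say, CALL_FUNCTION_KW_2 specifically
        if opname in ('CALL_FUNCTION_KW', 'BUILD_TUPLE_UNPACK_WITH_CALL',
                      'BUILD_MAP_UNPACK_WITH_CALL'):
            return '%s_%d' % (opname, attr)
        return opname

    print(specialize('CALL_FUNCTION_KW', 2))   # CALL_FUNCTION_KW_2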
uncompyle6/scanners/scanner37.py (new file, 38 lines)
@@ -0,0 +1,38 @@
+# Copyright (c) 2016-2017 by Rocky Bernstein
+"""
+Python 3.7 bytecode decompiler scanner
+
+Does some additional massaging of xdis-disassembled instructions to
+make things easier for decompilation.
+
+This sets up opcodes Python's 3.7 and calls a generalized
+scanner routine for Python 3.
+"""
+
+from __future__ import print_function
+
+from uncompyle6.scanners.scanner3 import Scanner3
+
+# bytecode verification, verify(), uses JUMP_OPs from here
+from xdis.opcodes import opcode_36 as opc
+JUMP_OPs = opc.JUMP_OPS
+
+class Scanner37(Scanner3):
+
+    def __init__(self, show_asm=None):
+        Scanner3.__init__(self, 3.7, show_asm)
+        return
+    pass
+
+if __name__ == "__main__":
+    from uncompyle6 import PYTHON_VERSION
+    if PYTHON_VERSION == 3.7:
+        import inspect
+        co = inspect.currentframe().f_code
+        tokens, customize = Scanner37().ingest(co)
+        for t in tokens:
+            print(t.format())
+        pass
+    else:
+        print("Need to be Python 3.7 to demo; I am %s." %
+              PYTHON_VERSION)
uncompyle6/scanners/tok.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2016 by Rocky Bernstein
+# Copyright (c) 2016-2017 by Rocky Bernstein
 # Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
 # Copyright (c) 1999 John Aycock

@@ -16,13 +16,12 @@ class Token:
     the contents of one line as output by dis.dis().
     """
     # FIXME: match Python 3.4's terms:
-    #   type_ should be opname
     #   linestart = starts_line
     #   attr = argval
     #   pattr = argrepr
-    def __init__(self, type_, attr=None, pattr=None, offset=-1,
+    def __init__(self, opname, attr=None, pattr=None, offset=-1,
                  linestart=None, op=None, has_arg=None, opc=None):
-        self.type = intern(type_)
+        self.kind = intern(opname)
         self.op = op
         self.has_arg = has_arg
         self.attr = attr
@@ -37,20 +36,20 @@ class Token:
     def __eq__(self, o):
         """ '==', but it's okay if offsets and linestarts are different"""
         if isinstance(o, Token):
-            # Both are tokens: compare type and attr
+            # Both are tokens: compare kind and attr
             # It's okay if offsets are different
-            return (self.type == o.type) and (self.pattr == o.pattr)
+            return (self.kind == o.kind) and (self.pattr == o.pattr)
         else:
-            return self.type == o
+            return self.kind == o

     def __repr__(self):
-        return str(self.type)
+        return str(self.kind)

     # def __str__(self):
     #     pattr = self.pattr if self.pattr is not None else ''
     #     prefix = '\n%3d ' % self.linestart if self.linestart else (' ' * 6)
     #     return (prefix +
-    #             ('%9s %-18s %r' % (self.offset, self.type, pattr)))
+    #             ('%9s %-18s %r' % (self.offset, self.kind, pattr)))

     def __str__(self):
         return self.format(line_prefix='')
@@ -60,7 +59,7 @@ class Token:
             prefix = '\n%s%4d ' % (line_prefix, self.linestart)
         else:
             prefix = ' ' * (6 + len(line_prefix))
-        offset_opname = '%6s %-17s' % (self.offset, self.type)
+        offset_opname = '%6s %-17s' % (self.offset, self.kind)
         if not self.has_arg:
             return "%s%s" % (prefix, offset_opname)

@@ -71,10 +70,10 @@ class Token:
         if self.pattr:
             pattr = self.pattr
             if self.opc:
-                if self.op in self.opc.hasjrel:
+                if self.op in self.opc.JREL_OPS:
                     if not self.pattr.startswith('to '):
                         pattr = "to " + self.pattr
-                elif self.op in self.opc.hasjabs:
+                elif self.op in self.opc.JABS_OPS:
                     self.pattr = str(self.pattr)
                     if not self.pattr.startswith('to '):
                         pattr = "to " + str(self.pattr)
@@ -84,14 +83,14 @@ class Token:
                     pattr = self.opc.cmp_op[self.attr]
                 # And so on. See xdis/bytecode.py get_instructions_bytes
                 pass
-        elif re.search('_\d+$', self.type):
+        elif re.search('_\d+$', self.kind):
             return "%s%s%s" % (prefix, offset_opname, argstr)
         else:
             pattr = ''
         return "%s%s%s %r" % (prefix, offset_opname, argstr, pattr)

     def __hash__(self):
-        return hash(self.type)
+        return hash(self.kind)

     def __getitem__(self, i):
         raise IndexError
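Because __eq__ falls back to comparing self.kind against a plain value, scanner and semantic code can test a Token directly against a string, which is what checks like n == 'lc_body' elsewhere in this diff rely on. A small sketch using the constructor from this diff; the tok module path is assumed:

    from uncompyle6.scanners.tok import Token

    t = Token('JUMP_BACK', attr=30, pattr='30', offset=44)
    print(t == 'JUMP_BACK')   # True: a Token compares equal to its kind
    print(repr(t))            # JUMP_BACK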
uncompyle6/semantics/check_ast.py
@@ -9,16 +9,16 @@ before reduction and don't reduce when there is a problem.
 """

 def checker(ast, in_loop, errors):
-    in_loop = in_loop or ast.type in ('while1stmt', 'whileTruestmt',
+    in_loop = in_loop or ast.kind in ('while1stmt', 'whileTruestmt',
                                       'whilestmt', 'whileelsestmt', 'while1elsestmt',
                                       'for_block')
-    if ast.type in ('augassign1', 'augassign2') and ast[0][0] == 'and':
+    if ast.kind in ('augassign1', 'augassign2') and ast[0][0] == 'and':
        text = str(ast)
        error_text = '\n# improper augmented assigment (e.g. +=, *=, ...):\n#\t' + '\n# '.join(text.split("\n")) + '\n'
        errors.append(error_text)

    for node in ast:
-        if not in_loop and node.type in ('continue_stmt', 'break_stmt'):
+        if not in_loop and node.kind in ('continue_stmt', 'break_stmt'):
            text = str(node)
            error_text = '\n# not in loop:\n#\t' + '\n# '.join(text.split("\n"))
            errors.append(error_text)
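The checker's job is to reject a parse that would decompile into illegal Python rather than emit it silently. For instance, a wrong reduction could land a loop-only statement outside any loop; this is the shape of (invalid) source it flags:

    # Invalid program of the kind checker() reports: 'break' here has no
    # enclosing loop and would be a SyntaxError if recompiled.
    def f(x):
        if x:
            break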
uncompyle6/semantics/consts.py
@@ -1,5 +1,5 @@
 # Copyright (c) 2017 by Rocky Bernstein
-"""Constants used in pysource.py"""
+"""Constants and initial table values used in pysource.py and fragments.py"""

 import re, sys
 from uncompyle6.parsers.astnode import AST
@@ -57,9 +57,7 @@ INDENT_PER_LEVEL = ' ' # additional indent per pretty-print level

 TABLE_R = {
     'STORE_ATTR': ( '%c.%[1]{pattr}', 0),
-    # 'STORE_SUBSCR': ( '%c[%c]', 0, 1 ),
     'DELETE_ATTR': ( '%|del %c.%[-1]{pattr}\n', 0 ),
-    # 'EXEC_STMT': ( '%|exec %c in %[1]C\n', 0, (0,maxint,', ') ),
 }

 TABLE_R0 = {
@@ -67,8 +65,9 @@ TABLE_R0 = {
     # 'BUILD_TUPLE': ( '(%C)', (0,-1,', ') ),
     # 'CALL_FUNCTION': ( '%c(%P)', 0, (1,-1,', ') ),
 }

 TABLE_DIRECT = {
     'BINARY_ADD': ( '+' ,),
     'BINARY_SUBTRACT': ( '-' ,),
     'BINARY_MULTIPLY': ( '*' ,),
     'BINARY_DIVIDE': ( '/' ,),
@@ -76,13 +75,13 @@ TABLE_DIRECT = {
     'BINARY_TRUE_DIVIDE': ( '/' ,), # Not in <= 2.1
     'BINARY_FLOOR_DIVIDE': ( '//' ,),
     'BINARY_MODULO': ( '%%',),
     'BINARY_POWER': ( '**',),
     'BINARY_LSHIFT': ( '<<',),
     'BINARY_RSHIFT': ( '>>',),
     'BINARY_AND': ( '&' ,),
     'BINARY_OR': ( '|' ,),
     'BINARY_XOR': ( '^' ,),
     'INPLACE_ADD': ( '+=' ,),
     'INPLACE_SUBTRACT': ( '-=' ,),
     'INPLACE_MULTIPLY': ( '*=' ,),
     'INPLACE_MATRIX_MULTIPLY': ( '@=' ,),
@@ -93,125 +92,126 @@ TABLE_DIRECT = {
     'INPLACE_POWER': ( '**=',),
     'INPLACE_LSHIFT': ( '<<=',),
     'INPLACE_RSHIFT': ( '>>=',),
     'INPLACE_AND': ( '&=' ,),
     'INPLACE_OR': ( '|=' ,),
     'INPLACE_XOR': ( '^=' ,),
     'binary_expr': ( '%c %c %c', 0, -1, 1 ),

     'UNARY_POSITIVE': ( '+',),
     'UNARY_NEGATIVE': ( '-',),
-    'UNARY_INVERT': ( '~%c'),
+    'UNARY_INVERT': ( '~'),
     'unary_expr': ( '%c%c', 1, 0),

     'unary_not': ( 'not %c', 0 ),
     'unary_convert': ( '`%c`', 0 ),
     'get_iter': ( 'iter(%c)', 0 ),
     'slice0': ( '%c[:]', 0 ),
     'slice1': ( '%c[%p:]', 0, (1, 100) ),
     'slice2': ( '%c[:%p]', 0, (1, 100) ),
     'slice3': ( '%c[%p:%p]', 0, (1, 100), (2, 100) ),

     'IMPORT_FROM': ( '%{pattr}', ),
     'load_attr': ( '%c.%[1]{pattr}', 0),
     'LOAD_FAST': ( '%{pattr}', ),
     'LOAD_NAME': ( '%{pattr}', ),
     'LOAD_CLASSNAME': ( '%{pattr}', ),
     'LOAD_GLOBAL': ( '%{pattr}', ),
     'LOAD_DEREF': ( '%{pattr}', ),
     'LOAD_LOCALS': ( 'locals()', ),
     'LOAD_ASSERT': ( '%{pattr}', ),
     # 'LOAD_CONST': ( '%{pattr}', ), # handled by n_LOAD_CONST
     'DELETE_FAST': ( '%|del %{pattr}\n', ),
     'DELETE_NAME': ( '%|del %{pattr}\n', ),
     'DELETE_GLOBAL': ( '%|del %{pattr}\n', ),
     'delete_subscr': ( '%|del %c[%c]\n', 0, 1,),
     'binary_subscr': ( '%c[%p]', 0, (1, 100)),
     'binary_subscr2': ( '%c[%p]', 0, (1, 100)),
     'store_subscr': ( '%c[%c]', 0, 1),
     'STORE_FAST': ( '%{pattr}', ),
     'STORE_NAME': ( '%{pattr}', ),
     'STORE_GLOBAL': ( '%{pattr}', ),
     'STORE_DEREF': ( '%{pattr}', ),
     'unpack': ( '%C%,', (1, maxint, ', ') ),

     # This nonterminal we create on the fly in semantic routines
     'unpack_w_parens': ( '(%C%,)', (1, maxint, ', ') ),

     'unpack_list': ( '[%C]', (1, maxint, ', ') ),
     'build_tuple2': ( '%P', (0, -1, ', ', 100) ),

     # 'list_compr': ( '[ %c ]', -2), # handled by n_list_compr
     'list_iter': ( '%c', 0 ),
     'list_for': ( ' for %c in %c%c', 2, 0, 3 ),
     'list_if': ( ' if %c%c', 0, 2 ),
     'list_if_not': ( ' if not %p%c', (0, 22), 2 ),
     'lc_body': ( '', ), # ignore when recursing

     'comp_iter': ( '%c', 0 ),
     'comp_if': ( ' if %c%c', 0, 2 ),
     'comp_ifnot': ( ' if not %p%c', (0, 22), 2 ),
     'comp_body': ( '', ), # ignore when recursing
     'set_comp_body': ( '%c', 0 ),
     'gen_comp_body': ( '%c', 0 ),
     'dict_comp_body': ( '%c:%c', 1, 0 ),

     'assign': ( '%|%c = %p\n', -1, (0, 200) ),

     # The 2nd parameter should have a = suffix.
     # There is a rule with a 4th parameter "designator"
     # which we don't use here.
     'augassign1': ( '%|%c %c %c\n', 0, 2, 1),

     'augassign2': ( '%|%c.%[2]{pattr} %c %c\n', 0, -3, -4 ),
     'designList': ( '%c = %c', 0, -1 ),
     'and': ( '%c and %c', 0, 2 ),
     'ret_and': ( '%c and %c', 0, 2 ),
     'and2': ( '%c', 3 ),
     'or': ( '%c or %c', 0, 2 ),
     'ret_or': ( '%c or %c', 0, 2 ),
     'conditional': ( '%p if %p else %p', (2, 27), (0, 27), (4, 27) ),
     'conditionalTrue': ( '%p if 1 else %p', (0, 27), (2, 27) ),
     'ret_cond': ( '%p if %p else %p', (2, 27), (0, 27), (-1, 27) ),
     'conditionalnot': ( '%p if not %p else %p', (2, 27), (0, 22), (4, 27) ),
     'ret_cond_not': ( '%p if not %p else %p', (2, 27), (0, 22), (-1, 27) ),
     'conditional_lambda': ( '(%c if %c else %c)', 2, 0, 3),
     'return_lambda': ('%c', 0),
-    'compare': ( '%p %[-1]{pattr} %p', (0, 19), (1, 19) ),
+    'compare': ( '%p %[-1]{pattr.replace("-", " ")} %p', (0, 19), (1, 19) ),
     'cmp_list': ( '%p %p', (0, 29), (1, 30)),
     'cmp_list1': ( '%[3]{pattr} %p %p', (0, 19), (-2, 19)),
     'cmp_list2': ( '%[1]{pattr} %p', (0, 19)),
     # 'classdef': (), # handled by n_classdef()
     'funcdef': ( '\n\n%|def %c\n', -2), # -2 to handle closures
     'funcdefdeco': ( '\n\n%c', 0),
     'mkfuncdeco': ( '%|@%c\n%c', 0, 1),
     'mkfuncdeco0': ( '%|def %c\n', 0),
     'classdefdeco': ( '\n\n%c', 0),
     'classdefdeco1': ( '%|@%c\n%c', 0, 1),
     'kwarg': ( '%[0]{pattr}=%c', 1),
     'kwargs': ( '%D', (0, maxint, ', ') ),

     'assert_expr_or': ( '%c or %c', 0, 2 ),
     'assert_expr_and': ( '%c and %c', 0, 2 ),
     'print_items_stmt': ( '%|print %c%c,\n', 0, 2 ), # Python 2 only
     'print_items_nl_stmt': ( '%|print %c%c\n', 0, 2 ),
     'print_item': ( ', %c', 0),
     'print_nl': ( '%|print\n', ),
     'print_to': ( '%|print >> %c, %c,\n', 0, 1 ),
     'print_to_nl': ( '%|print >> %c, %c\n', 0, 1 ),
     'print_nl_to': ( '%|print >> %c\n', 0 ),
     'print_to_items': ( '%C', (0, 2, ', ') ),

     'call_stmt': ( '%|%p\n', (0, 200)),
     'break_stmt': ( '%|break\n', ),
     'continue_stmt': ( '%|continue\n', ),

     'raise_stmt0': ( '%|raise\n', ),
     'raise_stmt1': ( '%|raise %c\n', 0),
     'raise_stmt3': ( '%|raise %c, %c, %c\n', 0, 1, 2),
     # 'yield': ( 'yield %c', 0),
     # 'return_stmt': ( '%|return %c\n', 0),
     'return_if_stmt': ( 'return %c\n', 0),

     'ifstmt': ( '%|if %c:\n%+%c%-', 0, 1 ),
     'iflaststmt': ( '%|if %c:\n%+%c%-', 0, 1 ),
     'iflaststmtl': ( '%|if %c:\n%+%c%-', 0, 1 ),
     'testtrue': ( 'not %p', (0, 22) ),
@@ -229,37 +229,37 @@ TABLE_DIRECT = {
     'elifelsestmtr2': ( '%|elif %c:\n%+%c%-%|else:\n%+%c%-\n\n', 0, 1, 3 ), # has COME_FROM

     'whileTruestmt': ( '%|while True:\n%+%c%-\n\n', 1 ),
     'whilestmt': ( '%|while %c:\n%+%c%-\n\n', 1, 2 ),
     'while1stmt': ( '%|while 1:\n%+%c%-\n\n', 1 ),
     'while1elsestmt': ( '%|while 1:\n%+%c%-%|else:\n%+%c%-\n\n', 1, -2 ),
     'whileelsestmt': ( '%|while %c:\n%+%c%-%|else:\n%+%c%-\n\n', 1, 2, -2 ),
     'whileelselaststmt': ( '%|while %c:\n%+%c%-%|else:\n%+%c%-', 1, 2, -2 ),
     'forstmt': ( '%|for %c in %c:\n%+%c%-\n\n', 3, 1, 4 ),
     'forelsestmt': (
         '%|for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n', 3, 1, 4, -2 ),
     'forelselaststmt': (
         '%|for %c in %c:\n%+%c%-%|else:\n%+%c%-', 3, 1, 4, -2 ),
     'forelselaststmtl': (
         '%|for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n', 3, 1, 4, -2 ),
     'trystmt': ( '%|try:\n%+%c%-%c\n\n', 1, 3 ),
     'tryelsestmt': ( '%|try:\n%+%c%-%c%|else:\n%+%c%-\n\n', 1, 3, 4 ),
     'tryelsestmtc': ( '%|try:\n%+%c%-%c%|else:\n%+%c%-', 1, 3, 4 ),
     'tryelsestmtl': ( '%|try:\n%+%c%-%c%|else:\n%+%c%-', 1, 3, 4 ),
     'tf_trystmt': ( '%c%-%c%+', 1, 3 ),
     'tf_tryelsestmt': ( '%c%-%c%|else:\n%+%c', 1, 3, 4 ),
     'tryfinallystmt': ( '%|try:\n%+%c%-%|finally:\n%+%c%-\n\n', 1, 5 ),
     'except': ( '%|except:\n%+%c%-', 3 ),
     'except_cond1': ( '%|except %c:\n', 1 ),
     'except_suite': ( '%+%c%-%C', 0, (1, maxint, '') ),
     'except_suite_finalize': ( '%+%c%-%C', 1, (3, maxint, '') ),
     'passstmt': ( '%|pass\n', ),
     'STORE_FAST': ( '%{pattr}', ),
     'kv': ( '%c: %c', 3, 1 ),
     'kv2': ( '%c: %c', 1, 2 ),
     'mapexpr': ( '{%[1]C}', (0, maxint, ', ') ),
     'importstmt': ( '%|import %c\n', 2),
     'importfrom': ( '%|from %[2]{pattr} import %c\n', 3 ),
     'importstar': ( '%|from %[2]{pattr} import *\n', ),
 }

@@ -276,7 +276,7 @@ MAP = {
 }

 # Operator precedence
-# See https://docs.python.org/3/reference/expressions.html
+# See https://docs.python.org/2/reference/expressions.html
+# or https://docs.python.org/3/reference/expressions.html
 # for a list.
 PRECEDENCE = {
@@ -332,6 +332,7 @@ PRECEDENCE = {
     'ret_or': 26,

     'conditional': 28,
+    'conditional_lamdba': 28,
     'conditionalnot': 28,
     'ret_cond': 28,
     'ret_cond_not': 28,
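Each TABLE_DIRECT entry above is a template: literal text is copied to the output, while specifiers such as %c (recurse into the child at the given index), %| (write the current indent), and %+/%- (indent and dedent) drive the tree walk. The following stripped-down sketch handles only %c over tuple-shaped nodes; it illustrates the mechanism and is not the project's engine:

    # Nodes here are (kind, children, text) tuples -- an assumption made
    # for the demo; the real walker operates on AST/Token objects.
    TABLE = {'binary_expr': ('%c %c %c', 0, -1, 1)}

    def expand(node):
        kind, children, text = node
        if kind not in TABLE:
            return text
        fmt, args = TABLE[kind][0], iter(TABLE[kind][1:])
        out, i = '', 0
        while i < len(fmt):
            if fmt[i] == '%' and fmt[i + 1] == 'c':
                out += expand(children[next(args)])   # recurse into child
                i += 2
            else:
                out += fmt[i]
                i += 1
        return out

    # children are [left, right, operator]; index -1 picks the operator
    expr = ('binary_expr',
            [(None, [], '1'), (None, [], '2'), (None, [], '+')], '')
    print(expand(expr))   # -> 1 + 2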
uncompyle6/semantics/fragments.py
@@ -1,6 +1,4 @@
-# Copyright (c) 2015, 2016 by Rocky Bernstein
-# Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
-# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
+# Copyright (c) 2015-2017 by Rocky Bernstein
 # Copyright (c) 1999 John Aycock

 """
@@ -8,8 +6,8 @@ Creates Python source code from an uncompyle6 abstract syntax tree,
 and indexes fragments which can be accessed by instruction offset
 address.

-See the comments in pysource for information on the abstract syntax tree
-and how semantic actions are written.
+See https://github.com/rocky/python-uncompyle6/wiki/Table-driven-semantic-actions.
+for a more complete explanation, nicely marked up and with examples.

 We add some format specifiers here not used in pysource

@@ -40,11 +38,12 @@ do it recursively which is where offsets are probably located.
 2. %b
 -----

-%b associates the text from the previous start node up to what we have now
+%b associates the text from the specified index to what we have now.
+It takes an integer argument.

 For example in:
   'importmultiple': ( '%|import%b %c%c\n', 0, 2, 3 ),

 The node position 0 will be associated with "import".

 """
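So for the importmultiple entry, %b with argument 0 ties the text written so far, the word import, to the token at index 0; a later fragment lookup at that token's offset then selects exactly that span. A toy illustration with made-up positions:

    # Hypothetical span recorded by %b for "import os, sys":
    text = "import os, sys"
    start, finish = 0, len("import")
    print(text[start:finish])   # -> import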
@@ -53,44 +52,39 @@ The node position 0 will be associated with "import".

 import re, sys

 from uncompyle6 import PYTHON3, IS_PYPY, PYTHON_VERSION
 from xdis.code import iscode
 from uncompyle6.semantics import pysource
 from uncompyle6 import parser
 from uncompyle6.scanner import Token, Code, get_scanner
 from uncompyle6.semantics.check_ast import checker
 from uncompyle6.semantics.helper import print_docstring

 from uncompyle6.show import (
     maybe_show_asm,
     maybe_show_ast,
     maybe_show_ast_param_default,
 )

 from uncompyle6.parsers.astnode import AST

 from uncompyle6.semantics.pysource import (
-    ParserError, find_globals, StringIO)
+    ParserError, StringIO)

 from uncompyle6.semantics.consts import (
     INDENT_PER_LEVEL, NONE, PRECEDENCE,
     TABLE_DIRECT, escape, minint, MAP
 )

 from uncompyle6.semantics.make_function import (
     find_all_globals, find_none, code_has_star_arg, code_has_star_star_arg
 )

 from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
 from spark_parser.ast import GenericASTTraversalPruningException

 from uncompyle6 import PYTHON_VERSION
 if PYTHON_VERSION < 2.6:
-    from xdis.namedtuple25 import namedtuple
+    from xdis.namedtuple24 import namedtuple
 else:
     from collections import namedtuple

 NodeInfo = namedtuple("NodeInfo", "node start finish")
 ExtractInfo = namedtuple("ExtractInfo",
-                         "lineNo lineStartOffset markerLine selectedLine selectedText")
+                         "lineNo lineStartOffset markerLine selectedLine selectedText nonterminal")

 TABLE_DIRECT_FRAGMENT = {
     'break_stmt': ( '%|%rbreak\n', ),
@@ -101,9 +95,8 @@ TABLE_DIRECT_FRAGMENT = {
     'importfrom': ( '%|from %[2]{pattr}%x import %c\n', (2, (0, 1)), 3),
     'importmultiple': ( '%|import%b %c%c\n', 0, 2, 3 ),
     'list_for': (' for %c%x in %c%c', 2, (2, (1, )), 0, 3 ),
-    'forstmt': ( '%|for%b %c%x in %c:\n%+%c%-\n\n', 0, 3, (3, (2, )), 1, 4 ),
     'forelsestmt': (
-        '%|for %c in %c%x:\n%+%c%-%|else:\n%+%c%-\n\n', 3, (3, (2,)), 1, 4, -2),
+        '%|for %c%x in %c:\n%+%c%-%|else:\n%+%c%-\n\n', 3, (3, (2,)), 1, 4, -2),
     'forelselaststmt': (
         '%|for %c%x in %c:\n%+%c%-%|else:\n%+%c%-', 3, (3, (2,)), 1, 4, -2),
     'forelselaststmtl': (
@@ -168,8 +161,9 @@ class FragmentsWalker(pysource.SourceWalker, object):
     def set_pos_info(self, node, start, finish, name=None):
         if name is None: name = self.name
         if hasattr(node, 'offset'):
-            self.offsets[name, node.offset] = \
-                NodeInfo(node = node, start = start, finish = finish)
+            node.start = start
+            node.finish = finish
+            self.offsets[name, node.offset] = node

         if hasattr(node, 'parent'):
             assert node.parent != node
@@ -185,6 +179,34 @@ class FragmentsWalker(pysource.SourceWalker, object):

         return

+    def table_r_node(self, node):
+        """General pattern where the last node should
+        get the text span attributes of the entire tree"""
+        start = len(self.f.getvalue())
+        try:
+            self.default(node)
+        except GenericASTTraversalPruningException:
+            final = len(self.f.getvalue())
+            self.set_pos_info(node, start, final)
+            self.set_pos_info(node[-1], start, final)
+            raise GenericASTTraversalPruningException
+
+    n_slice0 = n_slice1 = n_slice2 = n_slice3 = n_binary_subscr = table_r_node
+    n_augassign_1 = n_print_item = exec_stmt = print_to_item = del_stmt = table_r_node
+    n_classdefco1 = n_classdefco2 = except_cond1 = except_cond2 = table_r_node
+
+    def n_passtmt(self, node):
+        start = len(self.f.getvalue()) + len(self.indent)
+        self.set_pos_info(node, start, start+len("pass"))
+        self.default(node)
+
+    def n_trystmt(self, node):
+        start = len(self.f.getvalue()) + len(self.indent)
+        self.set_pos_info(node[0], start, start+len("try:"))
+        self.default(node)
+
+    n_tryelsestmt = n_tryelsestmtc = n_tryelsestmtl = n_tryfinallystmt = n_trystmt
+
     def n_return_stmt(self, node):
         start = len(self.f.getvalue()) + len(self.indent)
         if self.params['isLambda']:
@@ -238,6 +260,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
             self.write(' ')
             node[0].parent = node
             self.preorder(node[0])
+        self.set_pos_info(node[-1], start, len(self.f.getvalue()))
         self.set_pos_info(node, start, len(self.f.getvalue()))
         self.prune() # stop recursing

@@ -285,11 +308,11 @@ class FragmentsWalker(pysource.SourceWalker, object):
     def n_expr(self, node):
         start = len(self.f.getvalue())
         p = self.prec
-        if node[0].type.startswith('binary_expr'):
+        if node[0].kind.startswith('binary_expr'):
             n = node[0][-1][0]
         else:
             n = node[0]
-        self.prec = PRECEDENCE.get(n.type, -2)
+        self.prec = PRECEDENCE.get(n.kind, -2)
         if n == 'LOAD_CONST' and repr(n.pattr)[0] == '-':
             n.parent = node
             self.set_pos_info(n, start, len(self.f.getvalue()))
@@ -375,15 +398,23 @@ class FragmentsWalker(pysource.SourceWalker, object):
             self.write(sep); sep = ", "
             self.preorder(subnode)
         self.set_pos_info(node, start, len(self.f.getvalue()))
+        self.set_pos_info(node[-1], start, len(self.f.getvalue()))
         self.println()
         self.prune() # stop recursing

     def n_ifelsestmtr(self, node):
-        if len(node[2]) != 2:
+        if node[2] == 'COME_FROM':
+            return_stmts_node = node[3]
+            node.kind = 'ifelsestmtr2'
+        else:
+            return_stmts_node = node[2]
+        if len(return_stmts_node) != 2:
             self.default(node)

-        if not (node[2][0][0][0] == 'ifstmt' and node[2][0][0][0][1][0] == 'return_if_stmts') \
-           and not (node[2][0][-1][0] == 'ifstmt' and node[2][0][-1][0][1][0] == 'return_if_stmts'):
+        if (not (return_stmts_node[0][0][0] == 'ifstmt'
+                 and return_stmts_node[0][0][0][1][0] == 'return_if_stmts')
+            and not (return_stmts_node[0][-1][0] == 'ifstmt'
+                     and return_stmts_node[0][-1][0][1][0] == 'return_if_stmts')):
             self.default(node)
             return

@@ -391,10 +422,10 @@ class FragmentsWalker(pysource.SourceWalker, object):
         self.write(self.indent, 'if ')
         self.preorder(node[0])
         self.println(':')
-        self.indentMore()
+        self.indent_more()
         node[1].parent = node
         self.preorder(node[1])
-        self.indentLess()
+        self.indent_less()

         if_ret_at_end = False
         if len(node[2][0]) >= 3:
@@ -404,26 +435,26 @@ class FragmentsWalker(pysource.SourceWalker, object):

         past_else = False
         prev_stmt_is_if_ret = True
-        for n in node[2][0]:
+        for n in return_stmts_node[0]:
             if (n[0] == 'ifstmt' and n[0][1][0] == 'return_if_stmts'):
                 if prev_stmt_is_if_ret:
-                    n[0].type = 'elifstmt'
+                    n[0].kind = 'elifstmt'
                 prev_stmt_is_if_ret = True
             else:
                 prev_stmt_is_if_ret = False
                 if not past_else and not if_ret_at_end:
                     self.println(self.indent, 'else:')
-                    self.indentMore()
+                    self.indent_more()
                     past_else = True
             n.parent = node
             self.preorder(n)
         if not past_else or if_ret_at_end:
             self.println(self.indent, 'else:')
-            self.indentMore()
+            self.indent_more()
         node[2][1].parent = node
         self.preorder(node[2][1])
         self.set_pos_info(node, start, len(self.f.getvalue()))
-        self.indentLess()
+        self.indent_less()
         self.prune()

     def n_elifelsestmtr(self, node):
@@ -440,20 +471,20 @@ class FragmentsWalker(pysource.SourceWalker, object):
         node[0].parent = node
         self.preorder(node[0])
         self.println(':')
-        self.indentMore()
+        self.indent_more()
         node[1].parent = node
         self.preorder(node[1])
-        self.indentLess()
+        self.indent_less()

         for n in node[2][0]:
-            n[0].type = 'elifstmt'
+            n[0].kind = 'elifstmt'
             n.parent = node
             self.preorder(n)
         self.println(self.indent, 'else:')
-        self.indentMore()
+        self.indent_more()
         node[2][1].parent = node
         self.preorder(node[2][1])
-        self.indentLess()
+        self.indent_less()
         self.set_pos_info(node, start, len(self.f.getvalue()))
         self.prune()

@@ -462,7 +493,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
         iname = node[0].pattr

         store_import_node = node[-1][-1]
-        assert store_import_node.type.startswith('STORE_')
+        assert store_import_node.kind.startswith('STORE_')

         sname = store_import_node.pattr
         self.write(iname)
@@ -486,17 +517,20 @@ class FragmentsWalker(pysource.SourceWalker, object):
             # LOAD_CONST code object ..
             # LOAD_CONST 'x0' if >= 3.3
             # MAKE_FUNCTION ..
-            code_index = -3
+            code_node = node[-3]
+        elif node[-2] == 'expr':
+            code_node = node[-2][0]
         else:
             # LOAD_CONST code object ..
             # MAKE_FUNCTION ..
-            code_index = -2
-        code = node[code_index]
-        func_name = code.attr.co_name
+            code_node = node[-2]
+        func_name = code_node.attr.co_name
         self.write(func_name)
+        self.set_pos_info(code_node, start, len(self.f.getvalue()))

-        self.indentMore()
-        self.make_function(node, isLambda=False, codeNode=code)
+        self.indent_more()
+        start = len(self.f.getvalue())
+        self.make_function(node, isLambda=False, codeNode=code_node)

         self.set_pos_info(node, start, len(self.f.getvalue()))

@@ -504,7 +538,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
             self.write('\n\n')
         else:
             self.write('\n\n\n')
-        self.indentLess()
+        self.indent_less()
         self.prune() # stop recursing

     def n_list_compr(self, node):
@@ -520,7 +554,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
         elif n == 'list_if': n = n[2]
         elif n == 'list_if_not': n= n[2]
         assert n == 'lc_body'
-        if node[0].type.startswith('BUILD_LIST'):
+        if node[0].kind.startswith('BUILD_LIST'):
             start = len(self.f.getvalue())
             self.set_pos_info(node[0], start, start+1)
             self.write( '[ ')
@@ -653,7 +687,7 @@ class FragmentsWalker(pysource.SourceWalker, object):

         # Python 2.7+ starts including set_comp_body
         # Python 3.5+ starts including setcomp_func
-        assert n.type in ('lc_body', 'comp_body', 'setcomp_func', 'set_comp_body'), ast
+        assert n.kind in ('lc_body', 'comp_body', 'setcomp_func', 'set_comp_body'), ast
         assert designator, "Couldn't find designator in list/set comprehension"

         old_name = self.name
@@ -680,7 +714,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
             self.preorder(if_node)
         self.prec = p
         self.name = old_name
-        if node[-1].type.startswith('CALL_FUNCTION'):
+        if node[-1].kind.startswith('CALL_FUNCTION'):
             self.set_pos_info(node[-1], gen_start, len(self.f.getvalue()))

     def listcomprehension_walk2(self, node):
@@ -709,7 +743,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
                 n = n[3]
             elif n in ('list_if', 'list_if_not'):
                 # FIXME: just a guess
-                if n[0].type == 'expr':
+                if n[0].kind == 'expr':
                     list_if = n
                 else:
                     list_if = n[1]
@@ -755,7 +789,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
             start = len(self.f.getvalue())
             self.set_pos_info(node[0], start-1, start)
             self.comprehension_walk3(node, 1, 0)
-        elif node[0].type == 'load_closure':
+        elif node[0].kind == 'load_closure':
             self.setcomprehension_walk3(node, collection_index=4)
         else:
             self.comprehension_walk(node, iter_index=4)
@@ -774,7 +808,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
         self.set_pos_info(node[0], start, len(self.f.getvalue()))
         self.write(': {')
         start = len(self.f.getvalue())
-        assert node[0].type.startswith('BUILD_SET')
+        assert node[0].kind.startswith('BUILD_SET')
         self.set_pos_info(node[0], start-1, start)
         designator = node[3]
         assert designator == 'designator'
@@ -783,7 +817,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
         fin = len(self.f.getvalue())
         self.set_pos_info(designator, start, fin)
         for_iter_node = node[2]
-        assert for_iter_node.type == 'FOR_ITER'
+        assert for_iter_node.kind == 'FOR_ITER'
         self.set_pos_info(for_iter_node, start, fin)
         self.write(" for ")
         self.preorder(designator)
@@ -802,7 +836,7 @@ class FragmentsWalker(pysource.SourceWalker, object):

     def n_listcomp(self, node):
         self.write('[')
-        if node[0].type == 'load_closure':
+        if node[0].kind == 'load_closure':
             self.listcomprehension_walk2(node)
         else:
             if node[0] == 'LOAD_LISTCOMP':
@@ -816,7 +850,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
         if len(node) > 1:
             if (node[0] == 'c_stmts_opt' and
                 node[0][0] == 'passstmt' and
-                node[1].type.startswith('JUMP_FORWARD')):
+                node[1].kind.startswith('JUMP_FORWARD')):
                 self.set_pos_info(node[1], node[0][0].start, node[0][0].finish)

     def setcomprehension_walk3(self, node, collection_index):
@@ -847,7 +881,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
                 n = n[3]
             elif n in ('list_if', 'list_if_not', 'comp_if', 'comp_if_not'):
                 # FIXME: just a guess
-                if n[0].type == 'expr':
+                if n[0].kind == 'expr':
                     list_if = n
                 else:
                     list_if = n[1]
@@ -953,9 +987,9 @@ class FragmentsWalker(pysource.SourceWalker, object):
         self.println(':')

         # class body
-        self.indentMore()
+        self.indent_more()
         self.build_class(subclass)
-        self.indentLess()
+        self.indent_less()

         self.currentclass = cclass
         self.set_pos_info(node, start, len(self.f.getvalue()))
@@ -1010,8 +1044,8 @@ class FragmentsWalker(pysource.SourceWalker, object):
         # NOTE: this differs from behavior in pysource.py

         if len(tokens) >= 2 and not noneInNames:
-            if tokens[-1].type == 'RETURN_VALUE':
-                if tokens[-2].type != 'LOAD_CONST':
+            if tokens[-1].kind == 'RETURN_VALUE':
+                if tokens[-2].kind != 'LOAD_CONST':
                     tokens.append(Token('RETURN_LAST'))
         if len(tokens) == 0:
             return
@@ -1101,7 +1135,6 @@ class FragmentsWalker(pysource.SourceWalker, object):
     def traverse(self, node, indent=None, isLambda=False):
         '''Builds up fragment which can be used inside a larger
         block of code'''
         self.param_stack.append(self.params)
         if indent is None: indent = self.indent
         p = self.pending_newlines
@@ -1196,16 +1229,38 @@ class FragmentsWalker(pysource.SourceWalker, object):

         if elided: selectedLine += ' ...'

+        if isinstance(nodeInfo, Token):
+            nodeInfo = nodeInfo.parent
+        else:
+            nodeInfo = nodeInfo
+
+        if isinstance(nodeInfo, AST):
+            nonterminal = nodeInfo[0]
+        else:
+            nonterminal = nodeInfo.node
+
         return ExtractInfo(lineNo = len(lines), lineStartOffset = lineStart,
                            markerLine = markerLine,
                            selectedLine = selectedLine,
-                           selectedText = selectedText)
+                           selectedText = selectedText,
+                           nonterminal = nonterminal)

     def extract_line_info(self, name, offset):
         if (name, offset) not in list(self.offsets.keys()):
             return None
         return self.extract_node_info(self.offsets[name, offset])

+    def prev_node(self, node):
+        prev = None
+        if not hasattr(node, 'parent'):
+            return prev
+        p = node.parent
+        for n in p:
+            if node == n:
+                return prev
+            prev = n
+        return prev
+
     def extract_parent_info(self, node):
         if not hasattr(node, 'parent'):
             return None, None
@@ -1244,10 +1299,10 @@ class FragmentsWalker(pysource.SourceWalker, object):
         # as a custom rule
         start = len(self.f.getvalue())
         n = len(node)-1
-        assert node[n].type.startswith('CALL_FUNCTION')
+        assert node[n].kind.startswith('CALL_FUNCTION')

         for i in range(n-2, 0, -1):
-            if not node[i].type in ['expr', 'LOAD_CLASSNAME']:
+            if not node[i].kind in ['expr', 'LOAD_CLASSNAME']:
                 break
             pass

@@ -1275,14 +1330,14 @@ class FragmentsWalker(pysource.SourceWalker, object):
         p = self.prec
         self.prec = 100

-        self.indentMore(INDENT_PER_LEVEL)
+        self.indent_more(INDENT_PER_LEVEL)
         line_seperator = ',\n' + self.indent
         sep = INDENT_PER_LEVEL[:-1]
         start = len(self.f.getvalue())
         self.write('{')

         if self.version > 3.0:
-            if node[0].type.startswith('kvlist'):
+            if node[0].kind.startswith('kvlist'):
                 # Python 3.5+ style key/value list in mapexpr
                 kv_node = node[0]
                 l = list(kv_node)
@@ -1297,11 +1352,11 @@ class FragmentsWalker(pysource.SourceWalker, object):
                     i += 2
                     pass
                 pass
-            elif node[1].type.startswith('kvlist'):
+            elif node[1].kind.startswith('kvlist'):
                 # Python 3.0..3.4 style key/value list in mapexpr
                 kv_node = node[1]
                 l = list(kv_node)
-                if len(l) > 0 and l[0].type == 'kv3':
+                if len(l) > 0 and l[0].kind == 'kv3':
                     # Python 3.2 does this
                     kv_node = node[1][0]
                     l = list(kv_node)
@@ -1324,7 +1379,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
                     pass
         else:
             # Python 2 style kvlist
-            assert node[-1].type.startswith('kvlist')
+            assert node[-1].kind.startswith('kvlist')
             kv_node = node[-1] # goto kvlist

         for kv in kv_node:
@@ -1352,7 +1407,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
             n.parent = node
             self.set_pos_info(n, start, finish)
         self.set_pos_info(node, start, finish)
-        self.indentLess(INDENT_PER_LEVEL)
+        self.indent_less(INDENT_PER_LEVEL)
         self.prec = p
         self.prune()

@@ -1363,7 +1418,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
         p = self.prec
         self.prec = 100
         n = node.pop()
-        lastnode = n.type
+        lastnode = n.kind
         start = len(self.f.getvalue())
         if lastnode.startswith('BUILD_LIST'):
             self.write('['); endchar = ']'
@@ -1388,7 +1443,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
         else:
             flat_elems.append(elem)

-        self.indentMore(INDENT_PER_LEVEL)
+        self.indent_more(INDENT_PER_LEVEL)
         if len(node) > 3:
             line_separator = ',\n' + self.indent
         else:
@@ -1413,14 +1468,14 @@ class FragmentsWalker(pysource.SourceWalker, object):
             n.parent = node.parent
             self.set_pos_info(n, start, finish)
         self.set_pos_info(node, start, finish)
-        self.indentLess(INDENT_PER_LEVEL)
+        self.indent_less(INDENT_PER_LEVEL)
         self.prec = p
         self.prune()

-    def engine(self, entry, startnode):
+    def template_engine(self, entry, startnode):
         """The format template interpretation engine. See the comment at the
-        beginning of this module for how we interpret format specifications such as
-        %c, %C, and so on.
+        beginning of this module for how we interpret format
+        specifications such as %c, %C, and so on.
         """

         # print("-----")
@@ -1457,8 +1512,8 @@ class FragmentsWalker(pysource.SourceWalker, object):
                 self.write('%')
                 self.set_pos_info(node, start, len(self.f.getvalue()))

-            elif typ == '+': self.indentMore()
-            elif typ == '-': self.indentLess()
+            elif typ == '+': self.indent_more()
+            elif typ == '-': self.indent_less()
             elif typ == '|': self.write(self.indent)
             # no longer used, since BUILD_TUPLE_n is pretty printed:
             elif typ == 'r': recurse_node = True
@@ -1478,7 +1533,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
                 # for loops have two positions that correspond to a single text
                 # location. In "for i in ..." there is the initialization "i" code as well
                 # as the iteration code with "i"
-                match = re.search(r'^for', startnode.type)
+                match = re.search(r'^for', startnode.kind)
                 if match and entry[arg] == 3:
                     self.set_pos_info(node[0], start, finish)
                     for n in node[2]:
@@ -1569,25 +1624,14 @@ class FragmentsWalker(pysource.SourceWalker, object):
         self.set_pos_info(startnode, startnode_start, fin)

         # FIXME rocky: figure out how to get these cases to be table driven.
         #
         # 1. for loops. For loops have two positions that correspond to a single text
         # location. In "for i in ..." there is the initialization "i" code as well
         # as the iteration code with "i". A "copy" spec like %X3,3 - copy param
         # 3 to param 2 would work
         #
         # 2. subroutine calls. If the last op is the call, for purposes of printing
         # we don't need to print anything special there. However it encompasses the
         # entire string of the node fn(...)
-        match = re.search(r'^try', startnode.type)
+        match = re.search(r'^call_function', startnode.kind)
         if match:
-            self.set_pos_info(node[0], startnode_start, startnode_start+len("try:"))
-            self.set_pos_info(node[2], node[3].finish, node[3].finish)
-        else:
-            match = re.search(r'^call_function', startnode.type)
-            if match:
-                last_node = startnode[-1]
-                # import traceback; traceback.print_stack()
-                self.set_pos_info(last_node, startnode_start, self.last_finish)
+            last_node = startnode[-1]
+            # import traceback; traceback.print_stack()
+            self.set_pos_info(last_node, startnode_start, self.last_finish)
         return

     @classmethod
@@ -1669,6 +1713,13 @@ def deparse_code(version, co, out=StringIO(), showasm=False, showast=False,
     if deparsed.ERROR:
         raise deparsed.ERROR

+    # To keep the API consistent with previous releases, convert
+    # deparse.offset values into NodeInfo items
+    for tup, node in deparsed.offsets.items():
+        deparsed.offsets[tup] = NodeInfo(node = node, start = node.start,
+                                         finish = node.finish)
+
     deparsed.scanner = scanner
     return deparsed

 from bisect import bisect_right
@@ -1690,7 +1741,7 @@ def deparse_code_around_offset(name, offset, version, co, out=StringIO(),
|
||||
deparsed = deparse_code(version, co, out, showasm, showast, showgrammar, is_pypy)
|
||||
if (name, offset) in deparsed.offsets.keys():
|
||||
# This is the easy case
|
||||
return deparsed.offsets[name, offset]
|
||||
return deparsed
|
||||
|
||||
valid_offsets = [t for t in deparsed.offsets if isinstance(t[1], int)]
|
||||
offset_list = sorted([t[1] for t in valid_offsets if t[0] == name])
|
||||
@@ -1703,6 +1754,7 @@ def deparse_code_around_offset(name, offset, version, co, out=StringIO(),
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
from uncompyle6 import IS_PYPY
|
||||
def deparse_test(co, is_pypy=IS_PYPY):
|
||||
sys_version = sys.version_info.major + (sys.version_info.minor / 10.0)
|
||||
walk = deparse_code(sys_version, co, showasm=False, showast=False,
|
||||
@@ -1716,7 +1768,7 @@ if __name__ == '__main__':
|
||||
nodeInfo = walk.offsets[name, offset]
|
||||
node = nodeInfo.node
|
||||
extractInfo = walk.extract_node_info(node)
|
||||
print("code: %s" % node.type)
|
||||
print("code: %s" % node.kind)
|
||||
# print extractInfo
|
||||
print(extractInfo.selectedText)
|
||||
print(extractInfo.selectedLine)
|
||||
@@ -1726,7 +1778,7 @@ if __name__ == '__main__':
|
||||
print("Contained in...")
|
||||
print(extractInfo.selectedLine)
|
||||
print(extractInfo.markerLine)
|
||||
print("code: %s" % p.type)
|
||||
print("code: %s" % p.kind)
|
||||
print('=' * 40)
|
||||
pass
|
||||
pass
|
||||
@@ -1745,7 +1797,7 @@ if __name__ == '__main__':
|
||||
nodeInfo = walk.offsets[name, offset]
|
||||
node = nodeInfo.node
|
||||
extractInfo = walk.extract_node_info(node)
|
||||
print("code: %s" % node.type)
|
||||
print("code: %s" % node.kind)
|
||||
# print extractInfo
|
||||
print(extractInfo.selectedText)
|
||||
print(extractInfo.selectedLine)
|
||||
@@ -1755,7 +1807,7 @@ if __name__ == '__main__':
|
||||
print("Contained in...")
|
||||
print(extractInfo.selectedLine)
|
||||
print(extractInfo.markerLine)
|
||||
print("code: %s" % p.type)
|
||||
print("code: %s" % p.kind)
|
||||
print('=' * 40)
|
||||
pass
|
||||
pass
|
||||
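For illustration, a minimal usage sketch of the fragment API changed above, modeled on the __main__ demo in the hunks just shown. The names (deparse_code, NodeInfo, extract_node_info, selectedText) come from the diff; the sample function and the printed fields are assumptions, and exact attribute availability may vary by release.

import sys
from io import StringIO  # assumption: Python 3
from uncompyle6.semantics.fragments import deparse_code

def add(a, b):
    return a + b

sys_version = sys.version_info.major + (sys.version_info.minor / 10.0)
walk = deparse_code(sys_version, add.__code__, out=StringIO())
# After the NodeInfo conversion added in the hunk above, walk.offsets maps
# (name, offset) pairs to NodeInfo(node, start, finish) records.
for (name, offset), node_info in walk.offsets.items():
    info = walk.extract_node_info(node_info.node)
    print(name, offset, repr(info.selectedText))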
@@ -3,7 +3,7 @@
"""
All the crazy things we have to do to handle Python functions
"""
from xdis.code import iscode
from xdis.code import iscode, code_has_star_arg, code_has_star_star_arg
from uncompyle6.scanner import Code
from uncompyle6.parsers.astnode import AST
from uncompyle6.semantics.parser_error import ParserError
@@ -17,7 +17,7 @@ def find_all_globals(node, globs):
for n in node:
if isinstance(n, AST):
globs = find_all_globals(n, globs)
elif n.type in ('STORE_GLOBAL', 'DELETE_GLOBAL', 'LOAD_GLOBAL'):
elif n.kind in ('STORE_GLOBAL', 'DELETE_GLOBAL', 'LOAD_GLOBAL'):
globs.add(n.pattr)
return globs

@@ -26,7 +26,7 @@ def find_globals(node, globs):
for n in node:
if isinstance(n, AST):
globs = find_globals(n, globs)
elif n.type in ('STORE_GLOBAL', 'DELETE_GLOBAL'):
elif n.kind in ('STORE_GLOBAL', 'DELETE_GLOBAL'):
globs.add(n.pattr)
return globs

@@ -36,21 +36,10 @@ def find_none(node):
if n not in ('return_stmt', 'return_if_stmt'):
if find_none(n):
return True
elif n.type == 'LOAD_CONST' and n.pattr is None:
elif n.kind == 'LOAD_CONST' and n.pattr is None:
return True
return False

# FIXME: put this in xdis
def code_has_star_arg(code):
"""Return True iff
the code object has a variable positional parameter (*args-like)"""
return (code.co_flags & 4) != 0

def code_has_star_star_arg(code):
"""Return True iff
The code object has a variable keyword parameter (**kwargs-like)."""
return (code.co_flags & 8) != 0

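The two helpers deleted above now come from xdis (see the changed import at the top of this hunk); their logic is just a co_flags bit test. A standalone sketch of the equivalent check, using the standard flag constants from the inspect module (CO_VARARGS == 4, CO_VARKEYWORDS == 8); the sample function f is illustrative:

import inspect

def f(*args, **kwargs):
    pass

code = f.__code__
print(bool(code.co_flags & inspect.CO_VARARGS))      # True: f has *args
print(bool(code.co_flags & inspect.CO_VARKEYWORDS))  # True: f has **kwargs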
# FIXME: DRY the below code...

def make_function3_annotate(self, node, isLambda, nested=1,
@@ -75,7 +64,7 @@ def make_function3_annotate(self, node, isLambda, nested=1,
return name

# MAKE_FUNCTION_... or MAKE_CLOSURE_...
assert node[-1].type.startswith('MAKE_')
assert node[-1].kind.startswith('MAKE_')

annotate_tuple = None
for annotate_last in range(len(node)-1, -1, -1):
@@ -91,7 +80,7 @@ def make_function3_annotate(self, node, isLambda, nested=1,
i = -1
j = annotate_last-1
l = -len(node)
while j >= l and node[j].type in ('annotate_arg' 'annotate_tuple'):
while j >= l and node[j].kind in ('annotate_arg' 'annotate_tuple'):
annotate_args[annotate_tup[i]] = node[j][0]
i -= 1
j -= 1
@@ -117,7 +106,7 @@ def make_function3_annotate(self, node, isLambda, nested=1,
lambda_index = None

if lambda_index and isLambda and iscode(node[lambda_index].attr):
assert node[lambda_index].type == 'LOAD_LAMBDA'
assert node[lambda_index].kind == 'LOAD_LAMBDA'
code = node[lambda_index].attr
else:
code = codeNode.attr
@@ -158,6 +147,9 @@ def make_function3_annotate(self, node, isLambda, nested=1,

i = len(paramnames) - len(defparams)
suffix = ''

no_paramnames = len(paramnames[:i]) == 0

for param in paramnames[:i]:
self.write(suffix, param)
suffix = ', '
@@ -180,6 +172,7 @@ def make_function3_annotate(self, node, isLambda, nested=1,
suffix = ''
for n in node:
if n == 'pos_arg':
no_paramnames = False
self.write(suffix)
param = paramnames[i]
self.write(param)
@@ -187,7 +180,11 @@ def make_function3_annotate(self, node, isLambda, nested=1,
aa = annotate_args[param]
if isinstance(aa, tuple):
aa = aa[0]
self.write(': "%s"' % aa)
self.write(': "%s"' % aa)
elif isinstance(aa, AST):
self.write(': ')
self.preorder(aa)

self.write('=')
i += 1
self.preorder(n)
@@ -200,64 +197,65 @@ def make_function3_annotate(self, node, isLambda, nested=1,

# self.println(indent, '#flags:\t', int(code.co_flags))
if kw_args + annotate_argc > 0:
if not code_has_star_arg(code):
if argc > 0:

self.write(", *, ")
else:
self.write("*, ")
pass
else:
self.write(", ")

kwargs = node[0]
last = len(kwargs)-1
i = 0
for n in node[0]:
if n == 'kwarg':
if (line_number != self.line_number):
self.write("\n" + indent)
line_number = self.line_number
self.write('%s=' % n[0].pattr)
self.preorder(n[1])
if i < last:
self.write(', ')
i += 1
if no_paramnames:
if not code_has_star_arg(code):
if argc > 0:
self.write(", *, ")
else:
self.write("*, ")
pass
pass
annotate_args = []
for n in node:
if n == 'annotate_arg':
annotate_args.append(n[0])
elif n == 'annotate_tuple':
t = n[0].attr
if t[-1] == 'return':
t = t[0:-1]
annotate_args = annotate_args[:-1]
pass
last = len(annotate_args) - 1
for i in range(len(annotate_args)):
self.write("%s: " % (t[i]))
self.preorder(annotate_args[i])
else:
self.write(", ")

kwargs = node[0]
last = len(kwargs)-1
i = 0
for n in node[0]:
if n == 'kwarg':
if (line_number != self.line_number):
self.write("\n" + indent)
line_number = self.line_number
self.write('%s=' % n[0].pattr)
self.preorder(n[1])
if i < last:
self.write(', ')
pass
i += 1
pass
break
pass
annotate_args = []
for n in node:
if n == 'annotate_arg':
annotate_args.append(n[0])
elif n == 'annotate_tuple':
t = n[0].attr
if t[-1] == 'return':
t = t[0:-1]
annotate_args = annotate_args[:-1]
pass
last = len(annotate_args) - 1
for i in range(len(annotate_args)):
self.write("%s: " % (t[i]))
self.preorder(annotate_args[i])
if i < last:
self.write(', ')
pass
pass
break
pass
pass
pass

if code_has_star_star_arg(code):
if argc > 0:
self.write(', ')
self.write('**%s' % code.co_varnames[argc + kw_pairs])

if code_has_star_star_arg(code):
if argc > 0:
self.write(', ')
self.write('**%s' % code.co_varnames[argc + kw_pairs])

if isLambda:
self.write(": ")
else:
self.write(')')
if 'return' in annotate_tuple[0].attr:
if (line_number != self.line_number):
if (line_number != self.line_number) and not no_paramnames:
self.write("\n" + indent)
line_number = self.line_number
self.write(' -> ')
@@ -320,7 +318,7 @@ def make_function2(self, node, isLambda, nested=1, codeNode=None):
return name

# MAKE_FUNCTION_... or MAKE_CLOSURE_...
assert node[-1].type.startswith('MAKE_')
assert node[-1].kind.startswith('MAKE_')

args_node = node[-1]
if isinstance(args_node.attr, tuple):
@@ -336,7 +334,7 @@ def make_function2(self, node, isLambda, nested=1, codeNode=None):
lambda_index = None

if lambda_index and isLambda and iscode(node[lambda_index].attr):
assert node[lambda_index].type == 'LOAD_LAMBDA'
assert node[lambda_index].kind == 'LOAD_LAMBDA'
code = node[lambda_index].attr
else:
code = codeNode.attr
@@ -404,7 +402,8 @@ def make_function2(self, node, isLambda, nested=1, codeNode=None):
if code_has_star_star_arg(code):
if argc > 0:
self.write(', ')
self.write('**%s' % code.co_varnames[argc + kw_pairs])
if argc + kw_pairs > 0:
self.write('**%s' % code.co_varnames[argc + kw_pairs])

if isLambda:
self.write(": ")
@@ -451,7 +450,7 @@ def make_function3(self, node, isLambda, nested=1, codeNode=None):
return name

# MAKE_FUNCTION_... or MAKE_CLOSURE_...
assert node[-1].type.startswith('MAKE_')
assert node[-1].kind.startswith('MAKE_')

args_node = node[-1]
if isinstance(args_node.attr, tuple):
@@ -485,7 +484,7 @@ def make_function3(self, node, isLambda, nested=1, codeNode=None):
lambda_index = None

if lambda_index and isLambda and iscode(node[lambda_index].attr):
assert node[lambda_index].type == 'LOAD_LAMBDA'
assert node[lambda_index].kind == 'LOAD_LAMBDA'
code = node[lambda_index].attr
else:
code = codeNode.attr
@@ -586,7 +585,7 @@ def make_function3(self, node, isLambda, nested=1, codeNode=None):
for n in node:
if n == 'pos_arg':
continue
elif self.version >= 3.4 and not (n.type in ('kwargs', 'kwarg')):
elif self.version >= 3.4 and not (n.kind in ('kwargs', 'kwarg')):
continue
else:
self.preorder(n)
@@ -11,62 +11,102 @@ and what they mean).

Upper levels of the grammar are a more-or-less conventional grammar for
Python.

Semantic action rules for nonterminal symbols can be specified here by
creating a method prefaced with "n_" for that nonterminal. For
example, "n_exec_stmt" handles the semantic actions for the
"exec_smnt" nonterminal symbol. Similarly if a method with the name
of the nonterminal is suffixed with "_exit" it will be called after
all of its children are called.

Another way to specify a semantic rule for a nonterminal is via a
rule given in one of the tables MAP_R0, MAP_R, or MAP_DIRECT.

These use a printf-like syntax to direct substitution from attributes
of the nonterminal and its children.

The rest of the below describes how table-driven semantic actions work
and gives a list of the format specifiers. The default() and engine()
methods implement most of the below.

Step 1 determines a table (T) and a path to a
table key (K) from the node type (N) (other nodes are shown as O):

N N N&K
/ | ... \ / | ... \ / | ... \
O O O O O K O O O
|
K

MAP_R0 (TABLE_R0) MAP_R (TABLE_R) MAP_DIRECT (TABLE_DIRECT)

The default is a direct mapping. The key K is then extracted from the
subtree and used to find a table entry T[K], if any. The result is a
format string and arguments (a la printf()) for the formatting engine.
Escapes in the format string are:

%c evaluate children N[A] recursively*
%C evaluate children N[A[0]]..N[A[1]-1] recursively, separate by A[2]*
%P same as %C but sets operator precedence
%D same as %C but is for left-recursive lists like kwargs which
goes to epsilon at the beginning. Using %C an extra separator
with an epsilon appears at the beginning
%, print ',' if last %C only printed one item. This is mostly for tuples
on the LHS of an assignment statement since BUILD_TUPLE_n pretty-prints
other tuples.
%| tab to current indentation level
%+ increase current indentation level
%- decrease current indentation level
%{...} evaluate ... in context of N
%% literal '%'
%p evaluate N setting precedence


* indicates an argument (A) required.

The '%' may optionally be followed by a number (C) in square brackets, which
makes the engine walk down to N[C] before evaluating the escape code.
"""

# The below is a bit long, but still it is somewhat abbreviated.
# See https://github.com/rocky/python-uncompyle6/wiki/Table-driven-semantic-actions.
# for a more complete explanation, nicely marked up and with examples.
#
#
# Semantic action rules for nonterminal symbols can be specified here by
# creating a method prefaced with "n_" for that nonterminal. For
# example, "n_exec_stmt" handles the semantic actions for the
# "exec_stmt" nonterminal symbol. Similarly if a method with the name
# of the nonterminal is suffixed with "_exit" it will be called after
# all of its children are called.
#
# After a while writing methods this way, you'll find many routines which do similar
# sorts of things, and soon you'll find you want a short notation to
# describe rules and not have to create methods at all.
#
# So another way to specify a semantic rule for a nonterminal is via
# one of the tables MAP_R0, MAP_R, or MAP_DIRECT where the key is the
# nonterminal name.
#
# These dictionaries use a printf-like syntax to direct substitution
# from attributes of the nonterminal and its children.
#
# The rest of the below describes how table-driven semantic actions work
# and gives a list of the format specifiers. The default() and
# template_engine() methods implement most of the below.
#
# We allow for a couple of ways to interact with a node in a tree. So
# step 1 after not seeing a custom method for a nonterminal is to
# determine from what point of view tree-wise the rule is applied.

# In the diagram below, N is a nonterminal name, and K is also a nonterminal
# name but the one used as a key in the table.
# We show where those are with respect to each other in the
# AST tree for N.
#
#
# N&K N N
# / | ... \ / | ... \ / | ... \
# O O O O O K O O O
# |
# K
# TABLE_DIRECT TABLE_R TABLE_R0
#
# The default table is TABLE_DIRECT mapping. By far, most rules used work this way.
# TABLE_R0 is rarely used.
#
# The key K is then extracted from the subtree and used to find one
# of the tables, T listed above. The result after applying T[K] is
# a format string and arguments (a la printf()) for the formatting
# engine.
#
# Escapes in the format string are:
#
# %c evaluate the node recursively. Its argument is a single
# integer representing a node index.
#
# %p like %c but sets the operator precedence.
# Its argument then is a tuple indicating the node
# index and the precedence value, an integer.
#
# %C evaluate children recursively, with sibling children separated by the
# given string. It needs a 3-tuple: a starting node, the maximum
# value of an end node, and a string to be inserted between sibling children
#
# %, Append ',' if last %C only printed one item. This is mostly for tuples
# on the LHS of an assignment statement since BUILD_TUPLE_n pretty-prints
# other tuples. The specifier takes no arguments
#
# %P same as %C but sets operator precedence. Its argument is a 4-tuple:
# the node low and high indices, the separator string, and the precedence
# value, an integer.
#
# %D Same as `%C` but this is for left-recursive lists like kwargs where the list
# goes to epsilon at the beginning. It needs a 3-tuple: a starting node, the
# maximum value of an end node, and a string to be inserted between
# sibling children. If we were to use `%C` an extra separator with an
# epsilon would appear at the beginning.
#
# %| Insert spaces to the current indentation level. Takes no arguments.
#
# %+ increase current indentation level. Takes no arguments.
#
# %- decrease current indentation level. Takes no arguments.
#
# %{...} evaluate ... in context of N
#
# %% literal '%'. Takes no arguments.
#
#
# The '%' may optionally be followed by a number (C) in square
# brackets, which makes the template_engine walk down to N[C] before
# evaluating the escape code.

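To make the specifier list above concrete, here is a small illustrative sketch (not part of the diff): a TABLE_DIRECT-style entry and a toy interpreter for the two simplest escapes, %c and %|. The entry, node shape, and function names are assumptions; the real template_engine() changed below handles the full set, including argument-consuming escapes like %C and %P.

TABLE_DIRECT_EXAMPLE = {
    # "return <expr>": indent, literal text, then render child 0 recursively
    'return_stmt': ('%|return %c\n', 0),
}

def toy_engine(entry, node, indent='    '):
    fmt, args = entry[0], entry[1:]
    out, arg, i = [], 0, 0
    while i < len(fmt):
        if fmt[i] == '%' and i + 1 < len(fmt):
            typ = fmt[i+1]
            if typ == '|':    # tab to the current indentation level
                out.append(indent)
            elif typ == 'c':  # recurse into the indexed child; here, stringify
                out.append(str(node[args[arg]]))
                arg += 1
            i += 2
        else:
            out.append(fmt[i])
            i += 1
    return ''.join(out)

print(toy_engine(TABLE_DIRECT_EXAMPLE['return_stmt'], ['x + 1']))
# -> "    return x + 1\n"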
import sys

from uncompyle6 import PYTHON3
@@ -122,6 +162,29 @@ class SourceWalker(GenericASTTraversal, object):
debug_parser=PARSER_DEFAULT_DEBUG,
compile_mode='exec', is_pypy=False,
linestarts={}):
"""version is the Python version (a float) of the Python dialect

of both the AST and language we should produce.

out is IO-like file pointer to where the output should go. It
should have a getvalue() method.

scanner is a method to call when we need to scan tokens. Sometimes
in producing output we will run across further tokens that need
to be scanned.

If showast is True, we print the AST tree.

compile_mode is either 'exec' or 'single'. It is the compile
mode that was used to create the AST and specifies a grammar variant within
a Python version to use.

is_pypy should be True if the AST was generated for PyPy.

linestarts is a dictionary of line number to bytecode offset. This
can sometimes assist in determining which kind of source-code construct
to use when there is ambiguity.
"""
GenericASTTraversal.__init__(self, ast=None)
self.scanner = scanner
params = {
@@ -304,11 +367,18 @@ class SourceWalker(GenericASTTraversal, object):
# MAKE_FUNCTION ..
code = node[-3]

self.indentMore()
self.indent_more()
for annotate_last in range(len(node)-1, -1, -1):
if node[annotate_last] == 'annotate_tuple':
break

# FIXME: the real situation is that when derived from
# funcdef_annotate the name has been filled in.
# But when derived from funcdefdeco it hasn't. Would like a better
# way to distinguish.
if self.f.getvalue()[-4:] == 'def ':
self.write(code.attr.co_name)

# FIXME: handle and pass full annotate args
make_function3_annotate(self, node, isLambda=False,
codeNode=code, annotate_last=annotate_last)
@@ -317,7 +387,7 @@ class SourceWalker(GenericASTTraversal, object):
self.write('\n\n')
else:
self.write('\n\n\n')
self.indentLess()
self.indent_less()
self.prune() # stop recursing
self.n_mkfunc_annotate = n_mkfunc_annotate

@@ -352,13 +422,39 @@ class SourceWalker(GenericASTTraversal, object):
node.type == 'call_function'
p = self.prec
self.prec = 80
self.engine(('%c(%P)', 0, (1, -4, ', ', 100)), node)
self.template_engine(('%c(%P)', 0,
(1, -4, ', ', 100)), node)
self.prec = p
node.type == 'async_call_function'
self.prune()
self.n_async_call_function = n_async_call_function

self.n_build_list_unpack = self.n_build_list

if version == 3.5:
def n_call_function(node):
mapping = self._get_mapping(node)
table = mapping[0]
key = node
for i in mapping[1:]:
key = key[i]
pass
if key.kind.startswith('CALL_FUNCTION_VAR_KW'):
# Python 3.5 changes the stack position of *args. kwargs come
# after *args whereas in earlier Pythons, *args is at the end
# which simplifies things from our perspective.
# Python 3.6+ replaces CALL_FUNCTION_VAR_KW with CALL_FUNCTION_EX
# We will just swap the order to make it look like earlier Python 3.
entry = table[key.kind]
kwarg_pos = entry[2][1]
args_pos = kwarg_pos - 1
# Put last node[args_pos] after subsequent kwargs
while node[kwarg_pos] == 'kwarg' and kwarg_pos < len(node):
# swap node[args_pos] with node[kwargs_pos]
node[kwarg_pos], node[args_pos] = node[args_pos], node[kwarg_pos]
args_pos = kwarg_pos
kwarg_pos += 1
self.default(node)
self.n_call_function = n_call_function

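A standalone sketch (with hypothetical names, on a plain list rather than an AST node) of the reordering the 3.5-only n_call_function above performs: bubble the *args child rightward past any trailing 'kwarg' children so the tree matches the earlier-Python layout. Note the bounds check is placed before the element access here, which the in-tree loop should arguably also do:

def swap_star_args_past_kwargs(children, args_pos):
    kwarg_pos = args_pos + 1
    while kwarg_pos < len(children) and children[kwarg_pos] == 'kwarg':
        # swap the *args slot with the kwarg that follows it
        children[kwarg_pos], children[args_pos] = children[args_pos], children[kwarg_pos]
        args_pos = kwarg_pos
        kwarg_pos += 1
    return children

print(swap_star_args_past_kwargs(['expr', 'star_args', 'kwarg', 'kwarg'], 1))
# -> ['expr', 'kwarg', 'kwarg', 'star_args']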
def n_funcdef(node):
if self.version == 3.6:
@@ -369,9 +465,11 @@ class SourceWalker(GenericASTTraversal, object):
is_code = hasattr(code_node, 'attr') and iscode(code_node.attr)
if (is_code and
(code_node.attr.co_flags & COMPILER_FLAG_BIT['COROUTINE'])):
self.engine(('\n\n%|async def %c\n', -2), node)
self.template_engine(('\n\n%|async def %c\n',
-2), node)
else:
self.engine(('\n\n%|def %c\n', -2), node)
self.template_engine(('\n\n%|def %c\n', -2),
node)
self.prune()
self.n_funcdef = n_funcdef

@@ -469,10 +567,10 @@ class SourceWalker(GenericASTTraversal, object):
super(SourceWalker, self).preorder(node)
self.set_pos_info(node)

def indentMore(self, indent=TAB):
def indent_more(self, indent=TAB):
self.indent += indent

def indentLess(self, indent=TAB):
def indent_less(self, indent=TAB):
self.indent = self.indent[:-len(indent)]

def traverse(self, node, indent=None, isLambda=False):
@@ -523,6 +621,8 @@ class SourceWalker(GenericASTTraversal, object):

if self.pending_newlines:
out = out[:-self.pending_newlines]
if isinstance(out, str) and not PYTHON3:
out = unicode(out, 'utf-8')
self.f.write(out)

def println(self, *data):
@@ -545,25 +645,19 @@ class SourceWalker(GenericASTTraversal, object):
node == AST('return_stmt',
[AST('ret_expr', [NONE]), Token('RETURN_VALUE')]))

def n_continue_stmt(self, node):
if self.version >= 3.0 and node[0] == 'CONTINUE':
t = node[0]
if not t.linestart:
# Artificially-added "continue" statements derived from JUMP_ABSOLUTE
# don't have line numbers associated with them.
# If this CONTINUE is to the same target as a JUMP_ABSOLUTE following it,
# then the "continue" can be suppressed.
op, offset = t.op, t.offset
next_offset = self.scanner.next_offset(op, offset)
scanner = self.scanner
code = scanner.code
if next_offset < len(code):
next_inst = code[next_offset]
if (scanner.opc.opname[next_inst] == 'JUMP_ABSOLUTE'
and t.pattr == code[next_offset+1]):
# Suppress "continue"
self.prune()
self.default(node)
# Python 3.x can have dead code as a result of its optimization?
# So we'll add a # at the end of the return lambda so the rest is ignored
def n_return_lambda(self, node):
if 1 <= len(node) <= 2:
self.preorder(node[0])
self.write(' # Avoid dead code: ')
self.prune()
else:
# We can't comment out like above because there may be a trailing ')'
# that needs to be written
assert len(node) == 3 and node[2] == 'LAMBDA_MARKER'
self.preorder(node[0])
self.prune()

def n_return_stmt(self, node):
if self.params['isLambda']:
@@ -582,6 +676,7 @@ class SourceWalker(GenericASTTraversal, object):

def n_return_if_stmt(self, node):
if self.params['isLambda']:
self.write(' return ')
self.preorder(node[0])
self.prune()
else:
@@ -638,12 +733,12 @@ class SourceWalker(GenericASTTraversal, object):

def n_expr(self, node):
p = self.prec
if node[0].type.startswith('binary_expr'):
if node[0].kind.startswith('binary_expr'):
n = node[0][-1][0]
else:
n = node[0]

self.prec = PRECEDENCE.get(n.type, -2)
self.prec = PRECEDENCE.get(n.kind, -2)
if n == 'LOAD_CONST' and repr(n.pattr)[0] == '-':
self.prec = 6

@@ -726,9 +821,9 @@ class SourceWalker(GenericASTTraversal, object):
self.prune()

def n_delete_subscr(self, node):
if node[-2][0] == 'build_list' and node[-2][0][-1].type.startswith('BUILD_TUPLE'):
if node[-2][0] == 'build_list' and node[-2][0][-1].kind.startswith('BUILD_TUPLE'):
if node[-2][0][-1] != 'BUILD_TUPLE_0':
node[-2][0].type = 'build_tuple2'
node[-2][0].kind = 'build_tuple2'
self.default(node)

n_store_subscr = n_binary_subscr = n_delete_subscr
@@ -737,9 +832,9 @@ class SourceWalker(GenericASTTraversal, object):
def n_tryfinallystmt(self, node):
if len(node[1][0]) == 1 and node[1][0][0] == 'stmt':
if node[1][0][0][0] == 'trystmt':
node[1][0][0][0].type = 'tf_trystmt'
node[1][0][0][0].kind = 'tf_trystmt'
if node[1][0][0][0] == 'tryelsestmt':
node[1][0][0][0].type = 'tf_tryelsestmt'
node[1][0][0][0].kind = 'tf_tryelsestmt'
self.default(node)

def n_exec_stmt(self, node):
@@ -764,26 +859,26 @@ class SourceWalker(GenericASTTraversal, object):

if len(n) == 1 == len(n[0]) and n[0] == '_stmts':
n = n[0][0][0]
elif n[0].type in ('lastc_stmt', 'lastl_stmt'):
elif n[0].kind in ('lastc_stmt', 'lastl_stmt'):
n = n[0][0]
else:
if not preprocess:
self.default(node)
return

if n.type in ('ifstmt', 'iflaststmt', 'iflaststmtl'):
node.type = 'ifelifstmt'
n.type = 'elifstmt'
elif n.type in ('ifelsestmtr',):
node.type = 'ifelifstmt'
n.type = 'elifelsestmtr'
elif n.type in ('ifelsestmt', 'ifelsestmtc', 'ifelsestmtl'):
node.type = 'ifelifstmt'
if n.kind in ('ifstmt', 'iflaststmt', 'iflaststmtl'):
node.kind = 'ifelifstmt'
n.kind = 'elifstmt'
elif n.kind in ('ifelsestmtr',):
node.kind = 'ifelifstmt'
n.kind = 'elifelsestmtr'
elif n.kind in ('ifelsestmt', 'ifelsestmtc', 'ifelsestmtl'):
node.kind = 'ifelifstmt'
self.n_ifelsestmt(n, preprocess=True)
if n == 'ifelifstmt':
n.type = 'elifelifstmt'
elif n.type in ('ifelsestmt', 'ifelsestmtc', 'ifelsestmtl'):
n.type = 'elifelsestmt'
n.kind = 'elifelifstmt'
elif n.kind in ('ifelsestmt', 'ifelsestmtc', 'ifelsestmtl'):
n.kind = 'elifelsestmt'
if not preprocess:
self.default(node)

@@ -792,7 +887,7 @@ class SourceWalker(GenericASTTraversal, object):
def n_ifelsestmtr(self, node):
if node[2] == 'COME_FROM':
return_stmts_node = node[3]
node.type = 'ifelsestmtr2'
node.kind = 'ifelsestmtr2'
else:
return_stmts_node = node[2]
if len(return_stmts_node) != 2:
@@ -808,9 +903,9 @@ class SourceWalker(GenericASTTraversal, object):
self.write(self.indent, 'if ')
self.preorder(node[0])
self.println(':')
self.indentMore()
self.indent_more()
self.preorder(node[1])
self.indentLess()
self.indent_less()

if_ret_at_end = False
if len(return_stmts_node[0]) >= 3:
@@ -823,27 +918,27 @@ class SourceWalker(GenericASTTraversal, object):
for n in return_stmts_node[0]:
if (n[0] == 'ifstmt' and n[0][1][0] == 'return_if_stmts'):
if prev_stmt_is_if_ret:
n[0].type = 'elifstmt'
n[0].kind = 'elifstmt'
prev_stmt_is_if_ret = True
else:
prev_stmt_is_if_ret = False
if not past_else and not if_ret_at_end:
self.println(self.indent, 'else:')
self.indentMore()
self.indent_more()
past_else = True
self.preorder(n)
if not past_else or if_ret_at_end:
self.println(self.indent, 'else:')
self.indentMore()
self.indent_more()
self.preorder(return_stmts_node[1])
self.indentLess()
self.indent_less()
self.prune()
n_ifelsestmtr2 = n_ifelsestmtr

def n_elifelsestmtr(self, node):
if node[2] == 'COME_FROM':
return_stmts_node = node[3]
node.type = 'elifelsestmtr2'
node.kind = 'elifelsestmtr2'
else:
return_stmts_node = node[2]

@@ -858,22 +953,22 @@ class SourceWalker(GenericASTTraversal, object):
self.write(self.indent, 'elif ')
self.preorder(node[0])
self.println(':')
self.indentMore()
self.indent_more()
self.preorder(node[1])
self.indentLess()
self.indent_less()

for n in return_stmts_node[0]:
n[0].type = 'elifstmt'
n[0].kind = 'elifstmt'
self.preorder(n)
self.println(self.indent, 'else:')
self.indentMore()
self.indent_more()
self.preorder(return_stmts_node[1])
self.indentLess()
self.indent_less()
self.prune()

def n_import_as(self, node):
store_node = node[-1][-1]
assert store_node.type.startswith('STORE_')
assert store_node.kind.startswith('STORE_')
iname = node[0].pattr # import name
sname = store_node.pattr # store_name
if iname and iname == sname or iname.startswith(sname + '.'):
@@ -898,25 +993,25 @@ class SourceWalker(GenericASTTraversal, object):
# LOAD_CONST code object ..
# LOAD_CONST 'x0' if >= 3.3
# MAKE_FUNCTION ..
code = node[-3]
code_node = node[-3]
elif node[-2] == 'expr':
code = node[-2][0]
code_node = node[-2][0]
else:
# LOAD_CONST code object ..
# MAKE_FUNCTION ..
code = node[-2]
code_node = node[-2]

func_name = code.attr.co_name
func_name = code_node.attr.co_name
self.write(func_name)

self.indentMore()
self.make_function(node, isLambda=False, codeNode=code)
self.indent_more()
self.make_function(node, isLambda=False, codeNode=code_node)

if len(self.param_stack) > 1:
self.write('\n\n')
else:
self.write('\n\n\n')
self.indentLess()
self.indent_less()
self.prune() # stop recursing

def make_function(self, node, isLambda, nested=1,
@@ -993,7 +1088,7 @@ class SourceWalker(GenericASTTraversal, object):
"""
p = self.prec
self.prec = 27
if node[-1].type == 'list_iter':
if node[-1].kind == 'list_iter':
n = node[-1]
elif self.is_pypy and node[-1] == 'JUMP_BACK':
n = node[-2]
@@ -1117,7 +1212,7 @@ class SourceWalker(GenericASTTraversal, object):
self.write('{')
if node[0] in ['LOAD_SETCOMP', 'LOAD_DICTCOMP']:
self.comprehension_walk3(node, 1, 0)
elif node[0].type == 'load_closure' and self.version >= 3.0:
elif node[0].kind == 'load_closure' and self.version >= 3.0:
self.setcomprehension_walk3(node, collection_index=4)
else:
self.comprehension_walk(node, iter_index=4)
@@ -1184,7 +1279,7 @@ class SourceWalker(GenericASTTraversal, object):

# Python 2.7+ starts including set_comp_body
# Python 3.5+ starts including setcomp_func
assert n.type in ('lc_body', 'comp_body', 'setcomp_func', 'set_comp_body'), ast
assert n.kind in ('lc_body', 'comp_body', 'setcomp_func', 'set_comp_body'), ast
assert designator, "Couldn't find designator in list/set comprehension"

self.preorder(n[0])
@@ -1234,7 +1329,7 @@ class SourceWalker(GenericASTTraversal, object):
n = n[3]
elif n in ('list_if', 'list_if_not'):
# FIXME: just a guess
if n[0].type == 'expr':
if n[0].kind == 'expr':
list_if = n
else:
list_if = n[1]
@@ -1255,7 +1350,7 @@ class SourceWalker(GenericASTTraversal, object):

def n_listcomp(self, node):
self.write('[')
if node[0].type == 'load_closure':
if node[0].kind == 'load_closure':
self.listcomprehension_walk2(node)
else:
self.comprehension_walk3(node, 1, 0)
@@ -1292,7 +1387,7 @@ class SourceWalker(GenericASTTraversal, object):
n = n[3]
elif n in ('list_if', 'list_if_not', 'comp_if', 'comp_if_not'):
# FIXME: just a guess
if n[0].type == 'expr':
if n[0].kind == 'expr':
list_if = n
else:
list_if = n[1]
@@ -1408,9 +1503,9 @@ class SourceWalker(GenericASTTraversal, object):
self.println(':')

# class body
self.indentMore()
self.indent_more()
self.build_class(subclass_code)
self.indentLess()
self.indent_less()

self.currentclass = cclass
if len(self.param_stack) > 1:
@@ -1444,10 +1539,10 @@ class SourceWalker(GenericASTTraversal, object):

def print_super_classes3(self, node):
n = len(node)-1
if node.type != 'expr':
assert node[n].type.startswith('CALL_FUNCTION')
if node.kind != 'expr':
assert node[n].kind.startswith('CALL_FUNCTION')
for i in range(n-2, 0, -1):
if not node[i].type in ['expr', 'LOAD_CLASSNAME']:
if not node[i].kind in ['expr', 'LOAD_CLASSNAME']:
break
pass

@@ -1481,13 +1576,13 @@ class SourceWalker(GenericASTTraversal, object):
p = self.prec
self.prec = 100

self.indentMore(INDENT_PER_LEVEL)
self.indent_more(INDENT_PER_LEVEL)
sep = INDENT_PER_LEVEL[:-1]
self.write('{')
line_number = self.line_number

if self.version >= 3.0 and not self.is_pypy:
if node[0].type.startswith('kvlist'):
if node[0].kind.startswith('kvlist'):
# Python 3.5+ style key/value list in mapexpr
kv_node = node[0]
l = list(kv_node)
@@ -1510,11 +1605,11 @@ class SourceWalker(GenericASTTraversal, object):
i += 2
pass
pass
elif len(node) > 1 and node[1].type.startswith('kvlist'):
elif len(node) > 1 and node[1].kind.startswith('kvlist'):
# Python 3.0..3.4 style key/value list in mapexpr
kv_node = node[1]
l = list(kv_node)
if len(l) > 0 and l[0].type == 'kv3':
if len(l) > 0 and l[0].kind == 'kv3':
# Python 3.2 does this
kv_node = node[1][0]
l = list(kv_node)
@@ -1539,7 +1634,7 @@ class SourceWalker(GenericASTTraversal, object):
i += 3
pass
pass
elif node[-1].type.startswith('BUILD_CONST_KEY_MAP'):
elif node[-1].kind.startswith('BUILD_CONST_KEY_MAP'):
# Python 3.6+ style const map
keys = node[-2].pattr
values = node[:-2]
@@ -1564,7 +1659,7 @@ class SourceWalker(GenericASTTraversal, object):
pass
else:
# Python 2 style kvlist
assert node[-1].type.startswith('kvlist')
assert node[-1].kind.startswith('kvlist')
kv_node = node[-1] # goto kvlist

first_time = True
@@ -1619,7 +1714,7 @@ class SourceWalker(GenericASTTraversal, object):
if sep.startswith(",\n"):
self.write(sep[1:])
self.write('}')
self.indentLess(INDENT_PER_LEVEL)
self.indent_less(INDENT_PER_LEVEL)
self.prec = p
self.prune()

@@ -1630,7 +1725,7 @@ class SourceWalker(GenericASTTraversal, object):
p = self.prec
self.prec = 100
lastnode = node.pop()
lastnodetype = lastnode.type
lastnodetype = lastnode.kind

# If this build list is inside a CALL_FUNCTION_VAR,
# then the first * has already been printed.
@@ -1670,7 +1765,7 @@ class SourceWalker(GenericASTTraversal, object):
else:
flat_elems.append(elem)

self.indentMore(INDENT_PER_LEVEL)
self.indent_more(INDENT_PER_LEVEL)
sep = ''

for elem in flat_elems:
@@ -1695,12 +1790,12 @@ class SourceWalker(GenericASTTraversal, object):
if lastnode.attr == 1 and lastnodetype.startswith('BUILD_TUPLE'):
self.write(',')
self.write(endchar)
self.indentLess(INDENT_PER_LEVEL)
self.indent_less(INDENT_PER_LEVEL)
self.prec = p
self.prune()

def n_unpack(self, node):
if node[0].type.startswith('UNPACK_EX'):
if node[0].kind.startswith('UNPACK_EX'):
# Python 3+
before_count, after_count = node[0].attr
for i in range(before_count+1):
@@ -1715,8 +1810,8 @@ class SourceWalker(GenericASTTraversal, object):
self.prune()
return
for n in node[1:]:
if n[0].type == 'unpack':
n[0].type = 'unpack_w_parens'
if n[0].kind == 'unpack':
n[0].kind = 'unpack_w_parens'
self.default(node)

n_unpack_w_parens = n_unpack
@@ -1725,33 +1820,34 @@ class SourceWalker(GenericASTTraversal, object):
# A horrible hack for Python 3.0 .. 3.2
if 3.0 <= self.version <= 3.2 and len(node) == 2:
if (node[0][0] == 'LOAD_FAST' and node[0][0].pattr == '__locals__' and
node[1][0].type == 'STORE_LOCALS'):
node[1][0].kind == 'STORE_LOCALS'):
self.prune()
self.default(node)

def n_assign2(self, node):
for n in node[-2:]:
if n[0] == 'unpack':
n[0].type = 'unpack_w_parens'
n[0].kind = 'unpack_w_parens'
self.default(node)

def n_assign3(self, node):
for n in node[-3:]:
if n[0] == 'unpack':
n[0].type = 'unpack_w_parens'
n[0].kind = 'unpack_w_parens'
self.default(node)

def n_except_cond2(self, node):
if node[-2][0] == 'unpack':
node[-2][0].type = 'unpack_w_parens'
node[-2][0].kind = 'unpack_w_parens'
self.default(node)

def engine(self, entry, startnode):
def template_engine(self, entry, startnode):
"""The format template interpretation engine. See the comment at the
beginning of this module for how we interpret format specifications such as
%c, %C, and so on.
beginning of this module for how we interpret format
specifications such as %c, %C, and so on.
"""
# self.println("----> ", startnode.type, ', ', entry[0])

# self.println("----> ", startnode.kind, ', ', entry[0])
fmt = entry[0]
arg = 1
i = 0
@@ -1769,24 +1865,23 @@ class SourceWalker(GenericASTTraversal, object):
if typ == '%': self.write('%')
elif typ == '+':
self.line_number += 1
self.indentMore()
self.indent_more()
elif typ == '-':
self.line_number += 1
self.indentLess()
self.indent_less()
elif typ == '|':
self.line_number += 1
self.write(self.indent)
# Used mostly on the LHS of an assignment
# BUILD_TUPLE_n is pretty printed and may take care of other uses.
elif typ == ',':
if (node.type in ('unpack', 'unpack_w_parens') and
if (node.kind in ('unpack', 'unpack_w_parens') and
node[0].attr == 1):
self.write(',')
elif typ == 'c':
if isinstance(entry[arg], int):
entry_node = node[entry[arg]]
self.preorder(entry_node)
arg += 1
entry_node = node[entry[arg]]
self.preorder(entry_node)
arg += 1
elif typ == 'p':
p = self.prec
(index, self.prec) = entry[arg]
@@ -1852,8 +1947,8 @@ class SourceWalker(GenericASTTraversal, object):
key = key[i]
pass

if key.type in table:
self.engine(table[key.type], node)
if key.kind in table:
self.template_engine(table[key.kind], node)
self.prune()

def customize(self, customize):
@@ -1877,7 +1972,7 @@ class SourceWalker(GenericASTTraversal, object):
'CALL_FUNCTION_VAR_KW', 'CALL_FUNCTION_KW'):
if v == 0:
str = '%c(%C' # '%C' is a dummy here ...
p2 = (0, 0, None) # .. because of this
p2 = (0, 0, None) # .. because of the None in this
else:
str = '%c(%C, '
p2 = (1, -2, ', ')
@@ -2072,6 +2167,11 @@ class SourceWalker(GenericASTTraversal, object):
# assert isinstance(tokens[0], Token)

if isLambda:
for t in tokens:
if t.kind == 'RETURN_END_IF':
t.kind = 'RETURN_END_IF_LAMBDA'
elif t.kind == 'RETURN_VALUE':
t.kind = 'RETURN_VALUE_LAMBDA'
tokens.append(Token('LAMBDA_MARKER'))
try:
ast = python_parser.parse(self.p, tokens, customize)
@@ -2088,10 +2188,10 @@ class SourceWalker(GenericASTTraversal, object):
# than fight (with the grammar to not emit "return None").
if self.hide_internal:
if len(tokens) >= 2 and not noneInNames:
if tokens[-1].type == 'RETURN_VALUE':
if tokens[-1].kind in ('RETURN_VALUE', 'RETURN_VALUE_LAMBDA'):
# Python 3.4's classes can add a "return None" which is
# invalid syntax.
if tokens[-2].type == 'LOAD_CONST':
if tokens[-2].kind == 'LOAD_CONST':
if isTopLevel or tokens[-2].pattr is None:
del tokens[-2:]
else:

@@ -43,7 +43,7 @@ BIN_OP_FUNCS = {
'BINARY_OR': operator.or_,
}

JUMP_OPs = None
JUMP_OPS = None

# --- exceptions ---

@@ -225,8 +225,8 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2,
import uncompyle6.scanners.scanner36 as scan
scanner = scan.Scanner36()

global JUMP_OPs
JUMP_OPs = list(scan.JUMP_OPs) + ['JUMP_BACK']
global JUMP_OPS
JUMP_OPS = list(scan.JUMP_OPS) + ['JUMP_BACK']

# use changed Token class
# We (re)set this here to save exception handling,
@@ -242,18 +242,18 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2,
scanner.resetTokenClass() # restore Token class

targets1 = dis.findlabels(code_obj1.co_code)
tokens1 = [t for t in tokens1 if t.type != 'COME_FROM']
tokens2 = [t for t in tokens2 if t.type != 'COME_FROM']
tokens1 = [t for t in tokens1 if t.kind != 'COME_FROM']
tokens2 = [t for t in tokens2 if t.kind != 'COME_FROM']

i1 = 0; i2 = 0
offset_map = {}; check_jumps = {}
while i1 < len(tokens1):
if i2 >= len(tokens2):
if len(tokens1) == len(tokens2) + 2 \
and tokens1[-1].type == 'RETURN_VALUE' \
and tokens1[-2].type == 'LOAD_CONST' \
and tokens1[-1].kind == 'RETURN_VALUE' \
and tokens1[-2].kind == 'LOAD_CONST' \
and tokens1[-2].pattr is None \
and tokens1[-3].type == 'RETURN_VALUE':
and tokens1[-3].kind == 'RETURN_VALUE':
break
else:
raise CmpErrorCodeLen(name, tokens1, tokens2)
@@ -265,13 +265,13 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2,
raise CmpErrorCode(name, tokens1[idx1].offset, tokens1[idx1],
tokens2[idx2], tokens1, tokens2)

if tokens1[i1].type != tokens2[i2].type:
if tokens1[i1].type == 'LOAD_CONST' == tokens2[i2].type:
if tokens1[i1].kind != tokens2[i2].kind:
if tokens1[i1].kind == 'LOAD_CONST' == tokens2[i2].kind:
i = 1
while tokens1[i1+i].type == 'LOAD_CONST':
while tokens1[i1+i].kind == 'LOAD_CONST':
i += 1
if tokens1[i1+i].type.startswith(('BUILD_TUPLE', 'BUILD_LIST')) \
and i == int(tokens1[i1+i].type.split('_')[-1]):
if tokens1[i1+i].kind.startswith(('BUILD_TUPLE', 'BUILD_LIST')) \
and i == int(tokens1[i1+i].kind.split('_')[-1]):
t = tuple([ elem.pattr for elem in tokens1[i1:i1+i] ])
if t != tokens2[i2].pattr:
raise CmpErrorCode(name, tokens1[i1].offset, tokens1[i1],
@@ -279,60 +279,60 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2,
i1 += i + 1
i2 += 1
continue
elif i == 2 and tokens1[i1+i].type == 'ROT_TWO' and tokens2[i2+1].type == 'UNPACK_SEQUENCE_2':
elif i == 2 and tokens1[i1+i].kind == 'ROT_TWO' and tokens2[i2+1].kind == 'UNPACK_SEQUENCE_2':
i1 += 3
i2 += 2
continue
elif i == 2 and tokens1[i1+i].type in BIN_OP_FUNCS:
f = BIN_OP_FUNCS[tokens1[i1+i].type]
elif i == 2 and tokens1[i1+i].kind in BIN_OP_FUNCS:
f = BIN_OP_FUNCS[tokens1[i1+i].kind]
if f(tokens1[i1].pattr, tokens1[i1+1].pattr) == tokens2[i2].pattr:
i1 += 3
i2 += 1
continue
elif tokens1[i1].type == 'UNARY_NOT':
if tokens2[i2].type == 'POP_JUMP_IF_TRUE':
if tokens1[i1+1].type == 'POP_JUMP_IF_FALSE':
elif tokens1[i1].kind == 'UNARY_NOT':
if tokens2[i2].kind == 'POP_JUMP_IF_TRUE':
if tokens1[i1+1].kind == 'POP_JUMP_IF_FALSE':
i1 += 2
i2 += 1
continue
elif tokens2[i2].type == 'POP_JUMP_IF_FALSE':
if tokens1[i1+1].type == 'POP_JUMP_IF_TRUE':
elif tokens2[i2].kind == 'POP_JUMP_IF_FALSE':
if tokens1[i1+1].kind == 'POP_JUMP_IF_TRUE':
i1 += 2
i2 += 1
continue
elif tokens1[i1].type in ('JUMP_FORWARD', 'JUMP_BACK') \
and tokens1[i1-1].type == 'RETURN_VALUE' \
and tokens2[i2-1].type in ('RETURN_VALUE', 'RETURN_END_IF') \
elif tokens1[i1].kind in ('JUMP_FORWARD', 'JUMP_BACK') \
and tokens1[i1-1].kind == 'RETURN_VALUE' \
and tokens2[i2-1].kind in ('RETURN_VALUE', 'RETURN_END_IF') \
and int(tokens1[i1].offset) not in targets1:
i1 += 1
continue
elif tokens1[i1].type == 'JUMP_FORWARD' and tokens2[i2].type == 'JUMP_BACK' \
and tokens1[i1+1].type == 'JUMP_BACK' and tokens2[i2+1].type == 'JUMP_BACK' \
elif tokens1[i1].kind == 'JUMP_FORWARD' and tokens2[i2].kind == 'JUMP_BACK' \
and tokens1[i1+1].kind == 'JUMP_BACK' and tokens2[i2+1].kind == 'JUMP_BACK' \
and int(tokens1[i1].pattr) == int(tokens1[i1].offset) + 3:
if int(tokens1[i1].pattr) == int(tokens1[i1+1].offset):
i1 += 2
i2 += 2
continue
elif tokens1[i1].type == 'LOAD_NAME' and tokens2[i2].type == 'LOAD_CONST' \
elif tokens1[i1].kind == 'LOAD_NAME' and tokens2[i2].kind == 'LOAD_CONST' \
and tokens1[i1].pattr == 'None' and tokens2[i2].pattr is None:
pass
elif tokens1[i1].type == 'LOAD_GLOBAL' and tokens2[i2].type == 'LOAD_NAME' \
elif tokens1[i1].kind == 'LOAD_GLOBAL' and tokens2[i2].kind == 'LOAD_NAME' \
and tokens1[i1].pattr == tokens2[i2].pattr:
pass
elif tokens1[i1].type == 'LOAD_ASSERT' and tokens2[i2].type == 'LOAD_NAME' \
elif tokens1[i1].kind == 'LOAD_ASSERT' and tokens2[i2].kind == 'LOAD_NAME' \
and tokens1[i1].pattr == tokens2[i2].pattr:
pass
elif (tokens1[i1].type == 'RETURN_VALUE' and
tokens2[i2].type == 'RETURN_END_IF'):
elif (tokens1[i1].kind == 'RETURN_VALUE' and
tokens2[i2].kind == 'RETURN_END_IF'):
pass
elif (tokens1[i1].type == 'BUILD_TUPLE_0' and
elif (tokens1[i1].kind == 'BUILD_TUPLE_0' and
tokens2[i2].pattr == ()):
pass
else:
raise CmpErrorCode(name, tokens1[i1].offset, tokens1[i1],
tokens2[i2], tokens1, tokens2)
elif tokens1[i1].type in JUMP_OPs and tokens1[i1].pattr != tokens2[i2].pattr:
if tokens1[i1].type == 'JUMP_BACK':
elif tokens1[i1].kind in JUMP_OPS and tokens1[i1].pattr != tokens2[i2].pattr:
if tokens1[i1].kind == 'JUMP_BACK':
dest1 = int(tokens1[i1].pattr)
dest2 = int(tokens2[i2].pattr)
if offset_map[dest1] != dest2:
@@ -387,28 +387,28 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2,
class Token(scanner.Token):
"""Token class with changed semantics for 'cmp()'."""
def __cmp__(self, o):
t = self.type # shortcut
if t == 'BUILD_TUPLE_0' and o.type == 'LOAD_CONST' and o.pattr == ():
t = self.kind # shortcut
if t == 'BUILD_TUPLE_0' and o.kind == 'LOAD_CONST' and o.pattr == ():
return 0
if t == 'COME_FROM' == o.type:
if t == 'COME_FROM' == o.kind:
return 0
if t == 'PRINT_ITEM_CONT' and o.type == 'PRINT_ITEM':
if t == 'PRINT_ITEM_CONT' and o.kind == 'PRINT_ITEM':
return 0
if t == 'RETURN_VALUE' and o.type == 'RETURN_END_IF':
if t == 'RETURN_VALUE' and o.kind == 'RETURN_END_IF':
return 0
if t == 'JUMP_IF_FALSE_OR_POP' and o.type == 'POP_JUMP_IF_FALSE':
if t == 'JUMP_IF_FALSE_OR_POP' and o.kind == 'POP_JUMP_IF_FALSE':
return 0
if JUMP_OPs and t in JUMP_OPs:
if JUMP_OPS and t in JUMP_OPS:
# ignore offset
return t == o.type
return (t == o.type) or self.pattr == o.pattr
return t == o.kind
return (t == o.kind) or self.pattr == o.pattr

def __repr__(self):
return '%s %s (%s)' % (str(self.type), str(self.attr),
return '%s %s (%s)' % (str(self.kind), str(self.attr),
repr(self.pattr))

def __str__(self):
return '%s\t%-17s %r' % (self.offset, self.type, self.pattr)
return '%s\t%-17s %r' % (self.offset, self.kind, self.pattr)

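An illustration, with assumed values, of the first equivalence in __cmp__ above: an explicit BUILD_TUPLE_0 and a constant-folded LOAD_CONST of () should verify as equal. The positional arguments mirror the (kind, attr, pattr, offset) order used by the __main__ demo at the bottom of this file; the attr and offset values here are made up.

t_built  = Token('BUILD_TUPLE_0', 0, None, 10)
t_folded = Token('LOAD_CONST', (), (), 10)
print(t_built.__cmp__(t_folded))  # 0, i.e. "equal" for verification purposes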
def compare_code_with_srcfile(pyc_filename, src_filename, weak_verify=False):
"""Compare a .pyc with a source code file."""
@@ -442,4 +442,4 @@ if __name__ == '__main__':
t2 = Token('LOAD_CONST', -421, 'code_object _expandLang', 55)
print(repr(t1))
print(repr(t2))
print(t1.type == t2.type, t1.attr == t2.attr)
print(t1.kind == t2.kind, t1.attr == t2.attr)

@@ -1,3 +1,3 @@
# This file is suitable for sourcing inside bash as
# well as importing into Python
VERSION='2.10.0'
VERSION='2.13.0'