mirror of
https://github.com/rocky/python-uncompyle6.git
Compare commits
45 Commits
release-py ... release-2.
Commits (SHA1):
c54a47b15f
d1e02afb4b
f4ceb6304d
503039ab51
8393064136
bb9b3ac9cf
05ac60ea74
d138a01bf1
9e8e4f54c7
a06a5e1cd8
1048f6a964
7fed237077
8b816ead0d
300d387349
27ab6fe2f5
2e164763eb
d332bde104
0893652943
6efd7afda3
ee3202779a
9c072a6a42
277ad36566
af3d46b35c
e1bc0c5cd6
5a519ed36a
af10f99776
0cbafa6e3a
4afaee2a36
daea3c348c
bf45260588
34a356d237
d9c1374a59
2e05137f2b
267ecda070
7e89839777
c7f8edd5ef
6a991833a3
28ee3f1257
e9588e56e2
7b2217fda4
5ca219f3d3
b733a1b036
4615cda03f
eb92418224
844221cd43
237 ChangeLog
@@ -1,6 +1,241 @@
2017-08-15 rocky <rb@dustyfeet.com>

        * uncompyle6/version.py: Get ready for release 2.11.4

2017-08-15 rocky <rb@dustyfeet.com>

        * __pkginfo__.py, pytest/validate.py, uncompyle6/parser.py,
        uncompyle6/scanner.py: Misc cleanups... remove code now in xdis,
        require at least xdis 3.5.4, PyPy tolerance in validate testing

2017-08-13 rocky <rb@dustyfeet.com>

        * pytest/test_basic.py, uncompyle6/parser.py, uncompyle6/scanner.py:
        Allow 3-part version string lookups, e.g. 2.7.1. We allow a float here,
        but if passed a string like '2.7' or '2.7.13', accept that in looking
        up either a scanner or a parser.

2017-08-10 rocky <rb@dustyfeet.com>

        * README.rst: Link typo. Name is trepan2 now, not trepan

2017-08-09 rocky <rb@dustyfeet.com>

        * ChangeLog, NEWS, README.rst, __pkginfo__.py,
        uncompyle6/semantics/consts.py, uncompyle6/version.py: Get ready for
        release 2.11.3. Need xdis 3.5.1 for now. Adjust for xdis "is-not",
        which we need as "is not"

2017-08-02 rocky <rb@dustyfeet.com>

        * __pkginfo__.py: Revert commit to wrong branch

2017-08-02 rocky <rb@dustyfeet.com>

        * __pkginfo__.py: Remove six from Python-2.4/2.5 package

2017-07-17 rocky <rb@dustyfeet.com>

        * __pkginfo__.py, uncompyle6/scanners/scanner2.py,
        uncompyle6/scanners/scanner3.py, uncompyle6/scanners/scanner30.py:
        xdis's "exception match" is now "exception-match"

2017-07-15 rocky <rb@dustyfeet.com>

        * __pkginfo__.py: xdis 3.5.1 is botched?

2017-07-14 rocky <rb@dustyfeet.com>

        * __pkginfo__.py: Use newer xdis

2017-07-14 R. Bernstein <rocky@users.noreply.github.com>

        * README.rst: Fixes issue #124

2017-07-14 rocky <rb@dustyfeet.com>

        * HISTORY.md: History updates

2017-07-09 rocky <rb@dustyfeet.com>

        * README.rst: RsT doc formatting

2017-07-09 rocky <rb@dustyfeet.com>

        * ChangeLog, HOW-TO-REPORT-A-BUG.md, NEWS, uncompyle6/version.py:
        Get ready for release 2.11.2

2017-07-08 rocky <rb@dustyfeet.com>

        * __pkginfo__.py, uncompyle6/scanner.py,
        uncompyle6/scanners/scanner2.py, uncompyle6/scanners/scanner26.py,
        uncompyle6/scanners/scanner3.py, uncompyle6/scanners/scanner30.py,
        uncompyle6/scanners/tok.py: Use xdis 3.5.0's opcode sets

2017-07-08 rocky <rb@dustyfeet.com>

        * test/test_pyenvlib.py, uncompyle6/scanners/pypy32.py,
        uncompyle6/scanners/pypy35.py, uncompyle6/scanners/scanner15.py,
        uncompyle6/scanners/scanner32.py, uncompyle6/scanners/scanner34.py,
        uncompyle6/scanners/scanner35.py, uncompyle6/scanners/scanner36.py:
        Start supporting PyPy 3.5 (5.7.1-beta)

2017-07-05 rocky <rb@dustyfeet.com>

        * test/simple_source/bug26/03_loop_if_cf.py,
        uncompyle6/parsers/parse26.py: Loops in Python 2.4-2.6: loop
        come_from. Looks like Python 2.4-2.6 may have a COME_FROM(_LOOP)
        before the jump_back. Fixes Issue #123

2017-06-29 rocky <rb@dustyfeet.com>

        * : Work around not having real flow-control analysis

2017-06-28 rocky <rb@dustyfeet.com>

        * uncompyle6/semantics/make_function.py: A guard against badly
        formatted bytecode

2017-06-25 rocky <rb@dustyfeet.com>

        * ChangeLog, NEWS, test/simple_source/bug31/04_def_annotate.py,
        uncompyle6/semantics/make_function.py,
        uncompyle6/semantics/pysource.py: 3.x function and annotation bug
        fixes

2017-06-25 rocky <rb@dustyfeet.com>

        * uncompyle6/version.py: Get ready for release 2.11.1

2017-06-24 rocky <rb@dustyfeet.com>

        * __pkginfo__.py, uncompyle6/scanner.py,
        uncompyle6/scanners/scanner2.py, uncompyle6/scanners/scanner3.py,
        uncompyle6/scanners/scanner30.py, uncompyle6/semantics/pysource.py:
        Use xdis' instruction offset calculation fns: next_offset, op_size,
        has_argument

2017-06-19 rocky <rb@dustyfeet.com>

        * uncompyle6/semantics/pysource.py: Python 2 sometimes needs
        str->unicode in writing?

2017-06-19 rocky <rb@dustyfeet.com>

        * uncompyle6/semantics/pysource.py: Allow deparsed output to be str
        as well as unicode

2017-06-18 rocky <rb@dustyfeet.com>

        * ChangeLog, NEWS, uncompyle6/version.py: Get ready for release
        2.11.0

2017-06-13 rocky <rb@dustyfeet.com>

        * uncompyle6/semantics/fragments.py: Adjust nodeInfo if it is a
        Token

2017-06-13 rocky <rb@dustyfeet.com>

        * uncompyle6/semantics/fragments.py: Add nonterminal node in
        extractInfo

2017-06-10 rocky <rb@dustyfeet.com>

        * uncompyle6/semantics/fragments.py,
        uncompyle6/semantics/make_function.py: Fragment-tag more
        expressions. Revise make_function3 comment wrt args and kwargs

2017-06-10 rocky <rb@dustyfeet.com>

        * uncompyle6/semantics/fragments.py: Fragment-tag array subscripts

2017-06-10 R. Bernstein <rocky@users.noreply.github.com>

        * README.rst: Create README.rst

2017-06-10 R. Bernstein <rocky@users.noreply.github.com>

        * README.rst: Create README.rst

2017-06-10 rocky <rb@dustyfeet.com>

        * uncompyle6/semantics/fragments.py: Set YIELD_VALUE offset in a
        <yield> expr

2017-06-10 rocky <rb@dustyfeet.com>

        * uncompyle6/semantics/make_function.py: Python 3.2 MAKE_FUNCTION
        again. Was handling bug32/01_named_and_kwargs.py wrong again

2017-06-09 R. Bernstein <rocky@users.noreply.github.com>

        * : Merge pull request #119 from rocky/scan-longconstant. Simplify
        access to L65536 ...

2017-06-09 rocky <rb@dustyfeet.com>

        * uncompyle6/semantics/make_function.py: Attempt to document the
        MAKE_FUNCTION/MAKE_LAMBDA mess... in Python 3.0+

2017-06-08 rocky <rb@dustyfeet.com>

        * uncompyle6/semantics/make_function.py: Correct make_function3 for
        Python 3.2

2017-06-08 rocky <rb@dustyfeet.com>

        * uncompyle6/semantics/pysource.py: Disable "continue" removal in
        pysource.py. "continue" could be the only statement and then
        removing it might lead to a dangling "else".

2017-06-07 rocky <rb@dustyfeet.com>

        * uncompyle6/semantics/fragments.py: Mark "pass" offsets. Start
        routine to find previous node.

2017-06-06 rocky <rb@dustyfeet.com>

        * uncompyle6/parsers/parse3.py, uncompyle6/semantics/fragments.py:
        Remove hacky fragments try fixup... hacky call_function code is also
        not needed or will be reinstated properly. Better grammar structure
        for Python 3.6 call_function.

2017-06-05 rocky <rb@dustyfeet.com>

        * uncompyle6/parsers/parse3.py, uncompyle6/parsers/parse36.py,
        uncompyle6/scanners/scanner36.py: BUILD_{MAP,TUPLE}_UNPACK &
        CALL_FUNCTION_EX_KW... Bang on these in 3.6. Not totally successful
        right now. In fact a regression on one of the test cases

2017-06-05 rocky <rb@dustyfeet.com>

        * uncompyle6/semantics/fragments.py: Important fragments bug fix...
        start, finish that had been adjusted wasn't getting reflected in the
        final returned deparsed.offsets dictionary. Redo keeping API
        compatibility, i.e. we still use namedtuple NodeInfo.

2017-06-04 rocky <rb@dustyfeet.com>

        * uncompyle6/parsers/parse3.py, uncompyle6/semantics/pysource.py:
        Python 3.5 *args with kwargs handling. 3.5 is a snowflake here.
        Thank you, Python. Fully fixes Issue 95. 3.6 is broken on this
        source, but for a *different* reason. Sigh.

2017-06-03 rocky <rb@dustyfeet.com>

        * uncompyle6/version.py: Get ready for release 2.10.1
        * README.rst, __pkginfo__.py,
        test/simple_source/bug35/04_CALL_FUNCTION_VAR_KW.py,
        uncompyle6/semantics/fragments.py: Small changes. Fragment-tag
        EXEC_STMT

2017-06-03 rocky <rb@dustyfeet.com>

        * .travis.yml: Streamline .travis.yml a little bit

2017-06-03 rocky <rb@dustyfeet.com>

        * __pkginfo__.py: We need six

2017-06-03 rocky <rb@dustyfeet.com>

        * README.rst, circle.yml, requirements-dev.txt: Go over
        administrivia

2017-06-03 rocky <rb@dustyfeet.com>

        * ChangeLog, NEWS, uncompyle6/version.py: Get ready for release
        2.10.1

2017-06-03 rocky <rb@dustyfeet.com>
15 HISTORY.md
@@ -44,8 +44,8 @@ it appears that Hartmut did most of the work to get this code to
accept the full Python language. He added precedence to the table
specifiers, support for multiple versions of Python, the
pretty-printing of docstrings, lists, and hashes. He also wrote test and verification routines of
deparsed bytecode, and used this in an extensive set of tests that he also wrote. He says he could verify against the
entire Python library. However I have subsequently found small and relatively obscure bugs in the decompilation code.

decompyle2.2 was packaged for Debian (sarge) by
[Ben Burton around 2002](https://packages.qa.debian.org/d/decompyle.html). As
@@ -66,7 +66,7 @@ code to handle first Python 2.3 and then 2.4 bytecodes. Because of
jump optimization introduced in the CPython bytecode compiler at that
time, various JUMP instructions were classified as going backwards, and
COME FROM instructions were reintroduced. See
[RELEASE-2.4-CHANGELOG.txt](https://github.com/rocky/python-uncompyle6/blob/master/DECOMPYLE-2.4-CHANGELOG.txt)
for more details here. There wasn't a public
release of RELEASE-2.4 and bytecodes other than Python 2.4 weren't
supported. Dan says the Python 2.3 version could verify the entire
@@ -99,7 +99,7 @@ made a few commits later on. But mostly wibiti, and Guenther
Starnberger got the code to where uncompyle2 was around 2012.

In `uncompyle`, decompilation of python bytecode 2.5 & 2.6 is done by
-transforming the byte code into a a pseudo 2.7 python bytecode and is
+transforming the byte code into a pseudo-2.7 Python bytecode and is
based on code from Eloi Vanderbeken.

This project, `uncompyle6`, abandons that approach for various
@@ -120,10 +120,10 @@ while, handling Python bytecodes from Python versions 2.5+ and
3.2+. In doing so, it has been expedient to separate this into three
projects:

-* bytecode loading and disassembly ([xdis](https://pypi.python.org/pypi/xdis)),
+* marshaling/unmarshaling, bytecode loading and disassembly ([xdis](https://pypi.python.org/pypi/xdis)),
* parsing and tree building ([spark_parser](https://pypi.python.org/pypi/spark_parser)),
* this project - grammar and semantic actions for decompiling
-  ([uncompyle6](https://pypi.python.org/pypi/spark_parser)).
+  ([uncompyle6](https://pypi.python.org/pypi/uncompyle6)).

Over the many years, code styles and Python features have
@@ -162,5 +162,8 @@ support has been lagging.
Tests for the project have been, or are being, culled from all of the
projects mentioned.

For a little bit of the history of changes to the Earley-algorithm parser,
see the file [NEW-FEATURES.rst](https://github.com/rocky/python-spark/blob/master/NEW-FEATURES.rst) in the [python-spark github repository](https://github.com/rocky/python-spark).

NB. If you find mistakes, want corrections, or want your name added
(or removed), please contact me.
@@ -19,7 +19,7 @@ So it is likely you'll find a mistranslation in decompiling.
The basic requirement is pretty simple:

* Python bytecode
-* Source text
+* Python source text

## What to send (additional helpful information)

@@ -50,7 +50,7 @@ one fool can learn, so can another."

## Narrowing the problem

-I don't need the entire source code base for which one file or module
+I don't need or want the entire source code base for which one file or module
can't be decompiled. I just need that one file or module only. If
there are several files, file a bug report for each file.
78 NEWS
@@ -1,14 +1,58 @@
-uncompyle6 2.10.1 2016-06-3 Marylin Frankel
+uncompyle6 2.11.4 2017-08-15

* scanner and parser now allow 3-part version string lookups,
  e.g. 2.7.1. We allow a float here, but if passed a string like '2.7' or
  '2.7.13', accept that in looking up either a scanner or a parser.
* unpin 3.5.1. xdis 3.5.4 has been released and fixes the problems we had. Use that.
* some routines here moved to xdis. Use the xdis version
* README.rst: Link typo. Name is trepan2 now, not trepan
* xdis-forced change: adjust for COMPARE_OP "is-not" in
  semantic routines. We need "is not".
* Some PyPy tolerance in validate testing.
* Some pyston tolerance

uncompyle6 2.11.3 2017-08-09

Very minor changes

- RsT doc fixes and updates
- use newer xdis, but not too new; 3.5.2 breaks uncompyle6
- use xdis opcode sets
- xdis "exception match" is now "exception-match"

uncompyle6 2.11.2 2017-07-09

- Start supporting PyPy 3.5 (5.7.1-beta)
- use xdis 3.5.0's opcode sets and require xdis 3.5.0
- Correct some Python 2.4-2.6 loop detection
- guard against badly formatted bytecode

uncompyle6 2.11.1 2017-06-25

- Python 3.x annotation and function signature fixes
- Bump xdis version
- Small pysource bug fixes

uncompyle6 2.11.0 2017-06-18 Fleetwood

- Major improvements in fragment tracking
  * Add nonterminal node in extractInfo
  * tag more offsets in expressions
  * tag array subscripts
  * set YIELD value offset in a <yield> expr
  * fix a long-standing bug in not adjusting final AST when melding other deparse ASTs
- Fixes yet again for make_function node handling; document what's up here
- Fix bug in snowflake Python 3.5 *args kwargs

uncompyle6 2.10.1 2017-06-3 Marylin Frankel

- fix some fragments parsing bugs
- was returning the wrong type sometimes in deparse_code_around_offset()
- capture function name in offsets
- track changes to ifelstrmtr node from pysource into fragments

-uncompyle6 2.10.0 2016-05-30 Elaine Gordon
+uncompyle6 2.10.0 2017-05-30 Elaine Gordon

-- Add fuzzy offset deparse lookup
-- 3.6 bugfixes
+- Add fuzzy offset deparse look up
+- 3.6 bug fixes
- fix EXTENDED_ARGS handling (and in 2.6 and others)
- semantic routine make_function fragments.py
- MAKE_FUNCTION handling
@@ -19,19 +63,19 @@ uncompyle6 2.10.0 2016-05-30 Elaine Gordon
- 3.5 FUNCTION_VAR bug
- 3.x pass statement inside while True
- Improve 3.2 decompilation
-- Fixed -o argument processing (Gregrory)
+- Fixed -o argument processing (grkov90)
- Reduce scope of LOAD_ASSERT as expr to 3.4+
- "await" statement fixes
- 2.3, 2.4 "if 1 .." fixes
- 3.x annotation fixes

-uncompyle6 2.9.11 2016-04-06
+uncompyle6 2.9.11 2017-04-06

- Better support for Python 3.5+ BUILD_MAP_UNPACK
- Start 3.6 CALL_FUNCTION_EX support
- Many decompilation bug fixes. (Many more remain). See ChangeLog

-uncompyle6 2.9.10 2016-02-25
+uncompyle6 2.9.10 2017-02-25

- Python grammar rule fixes
- Add ability to get grammar coverage on runs
@@ -98,7 +142,7 @@ uncompyle6 2.9.6 2016-11-20
uncompyle6 2.9.5 2016-11-13

- Fix Python 3 bugs:
-  * improprer while 1 else
+  * improper while 1 else
  * docstring indent
  * 3.3 default values in lambda expressions
  * start 3.0 decompilation (needs newer xdis)
@@ -108,12 +152,12 @@ uncompyle6 2.9.5 2016-11-13
uncompyle6 2.9.4 2016-11-02

- Handle Python 3.x function annotations
-- track def keywoard-parameter line-splitting in source code better
+- track def keyword-parameter line-splitting in source code better
- bump min xdis version to mask previous xdis bug

uncompyle6 2.9.3 2016-10-26

-Release forced by incompatiblity change in xdis 3.2.0.
+Release forced by incompatibility change in xdis 3.2.0.

- Python 3.1 bugs:
  * handle "with ... as"
@@ -145,7 +189,7 @@ uncompyle6 2.9.0 2016-10-09
  this Forces change in requirements.txt and _pkg_info_.py
- Start Python 1.5 decompiling; another round of work is needed to
  remove bugs
-- Simpify python 2.1 grammar
+- Simplify python 2.1 grammar
- Fix bug with -t ... Wasn't showing source text when -t option was given
- Fix 2.1-2.6 bug in list comprehension

@@ -168,7 +212,7 @@ control-flow structure detection is done.
. 3.0 .. 3.2 *args processing
. 3.0 .. 3.2 call name and kwargs bug
. 3.0 .. getting parameter of *
-. 3.0 .. handling varible number of args
+. 3.0 .. handling variable number of args
. 3.0 .. "if" structure bugs
* 3.5+ if/else bugs
* 2.2-2.6 bugs
@@ -219,7 +263,7 @@ uncompyle6 2.7.1 2016-07-26

uncompyle6 2.7.0 2016-07-15

-- Many Syntax and verifification bugs removed
+- Many Syntax and verification bugs removed
  tested on standard libraries from 2.3.7 to 3.5.1
  and they all decompile and verify fine.
  I'm sure there are more bugs though.
@@ -246,9 +290,9 @@ uncompyle6 2.6.0 2016-07-07
- Better <2.6 vs. 2.7 grammar separation
- Fix some 2.7 deparsing bugs
- Fix bug in installing uncompyle6 script
-- Doc improvments
+- Doc improvements

-uncompyle6 2.5.0 2016-06-22 Summer Solstace
+uncompyle6 2.5.0 2016-06-22 Summer Solstice

- Much better Python 3.2-3.5 coverage.
  3.4.6 is probably the best; 3.2 and 3.5 are weaker
@@ -260,7 +304,7 @@ uncompyle6 2.5.0 2016-06-22 Summer Solstace
uncompyle6 2.4.0 2016-05-18 (in memory of Lewis Bernstein)

- Many Python 3 bugs fixed:
-  * Python 3.2 to 3.5 libaries largely
+  * Python 3.2 to 3.5 libraries largely
    uncompyle and most verify
- pydisassembler:
  * disassembles all code objects in a file
@@ -318,7 +362,7 @@ uncompyle6 2.2.0 2016-04-30

uncompyle6 2.2.0 2016-04-02

-- Support single-mode (in addtion to exec-mode) compilation
+- Support single-mode (in addition to exec-mode) compilation
- Start to DRY Python 2 and Python 3 grammars
- Fix bug in if else ternary construct
- Fix bug in uncompyle6 -d and -r options (via lelicopter)
@@ -56,7 +56,7 @@ This uses setup.py, so it follows the standard Python routine:

::

-    pip install -e setup.py
+    pip install -e .
    pip install -r requirements-dev.txt
    python setup.py install # may need sudo
    # or if you have pyenv:
@@ -171,9 +171,12 @@ See Also

* https://code.google.com/archive/p/unpyc3/ : supports Python 3.2 only. The above projects use a different decompiling technique than what is used here.
* https://github.com/figment/unpyc3/ : fork of above, but supports Python 3.3 only. Include some fixes like supporting function annotations
* The HISTORY_ file.
* `How to report a bug <https://github.com/rocky/python-uncompyle6/blob/master/HOW-TO-REPORT-A-BUG.md>`_
* https://github.com/rocky/python-xdis : Cross Python version disassembler
* https://github.com/rocky/python-xasm : Cross Python version assembler

.. |downloads| image:: https://img.shields.io/pypi/dd/uncompyle6.svg
-.. _trepan: https://pypi.python.org/pypi/trepan
+.. _trepan: https://pypi.python.org/pypi/trepan2
.. _HISTORY: https://github.com/rocky/python-uncompyle6/blob/master/HISTORY.md
.. _debuggers: https://pypi.python.org/pypi/trepan3k
.. _remake: https://bashdb.sf.net/remake
@@ -33,14 +33,14 @@ classifiers = ['Development Status :: 5 - Production/Stable',
# The rest in alphabetic order
author = "Rocky Bernstein, Hartmut Goebel, John Aycock, and others"
author_email = "rb@dustyfeet.com"
-entry_points={
+entry_points = {
    'console_scripts': [
        'uncompyle6=uncompyle6.bin.uncompile:main_bin',
        'pydisassemble=uncompyle6.bin.pydisassemble:main',
    ]}
ftp_url = None
install_requires = ['spark-parser >= 1.6.1, < 1.7.0',
-                    'xdis >= 3.3.1, < 3.4.0', 'six']
+                    'xdis >= 3.5.4, < 3.6.0', 'six']
license = 'MIT'
mailing_list = 'python-debugger@googlegroups.com'
modname = 'uncompyle6'
11 pytest/test_basic.py (new file)
@@ -0,0 +1,11 @@
from uncompyle6.scanner import get_scanner
from uncompyle6.parser import get_python_parser

def test_get_scanner():
    # See that we can retrieve a scanner using a full version number
    assert get_scanner('2.7.13')


def test_get_parser():
    # See that we can retrieve a sparser using a full version number
    assert get_python_parser('2.7.13')
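The new test above only checks a single version string. A hedged sketch of how it could be extended with pytest parametrization; the extra version strings are my own choices, not part of this change:

    import pytest

    from uncompyle6.parser import get_python_parser
    from uncompyle6.scanner import get_scanner

    # '2.7.13' is the string the new test uses; the others are assumed to resolve too.
    @pytest.mark.parametrize("version", ["2.7", "2.7.13", "3.4.2"])
    def test_lookup_by_version_string(version):
        assert get_scanner(version)
        assert get_python_parser(version)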
@@ -123,7 +123,9 @@ def validate_uncompyle(text, mode='exec'):
    original_text = text

    deparsed = deparse_code(PYTHON_VERSION, original_code,
-                            compile_mode=mode, out=six.StringIO())
+                            compile_mode=mode,
+                            out=six.StringIO(),
+                            is_pypy=IS_PYPY)
    uncompyled_text = deparsed.text
    uncompyled_code = compile(uncompyled_text, '<string>', 'exec')
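For context, the core round trip that validate_uncompyle() performs can be sketched in isolation. The deparse_code() call signature matches the hunk above; the import path and the sample source text are assumptions:

    import six

    from uncompyle6 import PYTHON_VERSION, IS_PYPY
    from uncompyle6.semantics.pysource import deparse_code  # import path assumed

    def roundtrip(text, mode='exec'):
        original_code = compile(text, '<string>', mode)
        deparsed = deparse_code(PYTHON_VERSION, original_code,
                                compile_mode=mode,
                                out=six.StringIO(),
                                is_pypy=IS_PYPY)
        # If the deparsed source is not valid Python, this compile() raises.
        return compile(deparsed.text, '<string>', mode)

    roundtrip("x = [i * i for i in range(10)]")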
BIN test/bytecode_2.6/03_loop_if_cf.pyc (new file; binary file not shown)
BIN test/bytecode_3.6/04_CALL_FUNCTION_VAR_KW.pyc-notyet (new file; binary file not shown)
19 test/simple_source/bug26/03_loop_if_cf.py (new file)
@@ -0,0 +1,19 @@
# Bug in < 2.6 is having a COME_FROM_LOOP (but we
# don't tag that so it is just COME_FROM *before*
# a jump back to the loop.
def pickup(self, open_players, open_buf, wrap_buf):
    for aplayer in self._game.active_players:

        if aplayer in open_players:
            aplayer.send(open_players)

            if self == aplayer:
                for awatcher in self._watchers:
                    if awatcher._can_see_detail:
                        awatcher.send(open_buf)
                    else:
                        awatcher.send(wrap_buf)
            else:
                self._game.send(aplayer.side)
        else:
            self._game.send(aplayer.side, wrap_buf)
@@ -9,7 +9,7 @@ def open(file, mode = "r", buffering = None,
         newline = None, closefd = True) -> "IOBase":
    return text

-def foo(x: 'an argument that defaults to 5' = 5):
+def foo1(x: 'an argument that defaults to 5' = 5):
    print(x)

def div(a: dict(type=float, help='the dividend'),
@@ -1,4 +1,5 @@
# sql/schema.py
# Note that kwargs comes before "positional" args
def tometadata(self, metadata, schema, Table, args, name=None):
    table = Table(
        name, metadata, schema=schema,
@@ -29,7 +29,7 @@ from fnmatch import fnmatch

TEST_VERSIONS=('2.3.7', '2.4.6', '2.5.6', '2.6.9',
               'pypy-2.4.0', 'pypy-2.6.1',
-              'pypy-5.0.1', 'pypy-5.3.1',
+              'pypy-5.0.1', 'pypy-5.3.1', 'pypy3.5-5.7.1-beta',
               '2.7.10', '2.7.11', '2.7.12', '2.7.13',
               '3.0.1', '3.1.5', '3.2.6',
               '3.3.5', '3.3.6',
@@ -11,10 +11,10 @@ from __future__ import print_function
import sys

from xdis.code import iscode
from xdis.magics import py_str2float
from spark_parser import GenericASTBuilder, DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
from uncompyle6.show import maybe_show_asm


class ParserError(Exception):
    def __init__(self, token, offset):
        self.token = token
@@ -605,7 +605,15 @@ def get_python_parser(
    explanation of the different modes.
    """

    # If version is a string, turn that into the corresponding float.
    if isinstance(version, str):
        version = py_str2float(version)

    # FIXME: there has to be a better way...
    # We could do this as a table lookup, but that would force us
    # in import all of the parsers all of the time. Perhaps there is
    # a lazy way of doing the import?

    if version < 3.0:
        if version == 1.5:
            import uncompyle6.parsers.parse15 as parse15
@@ -758,6 +766,7 @@ def python_parser(version, co, out=sys.stdout, showasm=False,
if __name__ == '__main__':
    def parse_test(co):
        from uncompyle6 import PYTHON_VERSION, IS_PYPY
-        ast = python_parser('2.7.13', co, showasm=True, is_pypy=True)
+        ast = python_parser(PYTHON_VERSION, co, showasm=True, is_pypy=IS_PYPY)
        print(ast)
        return
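The isinstance()/py_str2float() step above is what makes three-part version strings work in get_python_parser() and get_scanner(). A hedged usage sketch; the printed value assumes py_str2float maps '2.7.13' into the 2.7 family, which is what the ChangeLog entry describes:

    from xdis.magics import py_str2float

    from uncompyle6.parser import get_python_parser
    from uncompyle6.scanner import get_scanner

    print(py_str2float('2.7.13'))         # expected: 2.7
    parser = get_python_parser('2.7.13')  # same parser family as get_python_parser(2.7)
    scanner = get_scanner('2.7.13')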
@@ -84,6 +84,12 @@ class Python26Parser(Python2Parser):
        ja_cf_pop ::= JUMP_ABSOLUTE come_froms POP_TOP
        jf_cf_pop ::= JUMP_FORWARD come_froms POP_TOP

        # The first optional COME_FROM when it appears is really
        # COME_FROM_LOOP, but in <= 2.6 we don't distinguish
        # this

        cf_jb_cf_pop ::= _come_from JUMP_BACK come_froms POP_TOP

        bp_come_from ::= POP_BLOCK COME_FROM
        jb_bp_come_from ::= JUMP_BACK bp_come_from

@@ -111,7 +117,8 @@ class Python26Parser(Python2Parser):
        break_stmt ::= BREAK_LOOP JUMP_BACK

        # Semantic actions want else_suitel to be at index 3
        ifelsestmtl ::= testexpr c_stmts_opt jb_cf_pop else_suitel
        ifelsestmtl ::= testexpr c_stmts_opt cf_jb_cf_pop else_suitel

        ifelsestmtc ::= testexpr c_stmts_opt ja_cf_pop else_suitec

        # Semantic actions want suite_stmts_opt to be at index 3
@@ -496,8 +496,8 @@ class Python3Parser(PythonParser):
        token.type = self.call_fn_name(token)
        uniq_param = args_kw + args_pos
        if self.version == 3.5 and opname.startswith('CALL_FUNCTION_VAR'):
-            # Python 3.5 changes the stack position of where * args, the
-            # first LOAD_FAST, below are located.
+            # Python 3.5 changes the stack position of *args. KW args come
+            # after *args.
            # Python 3.6+ replaces CALL_FUNCTION_VAR_KW with CALL_FUNCTION_EX
            if opname.endswith('KW'):
                kw = 'expr '
@@ -507,11 +507,17 @@ class Python3Parser(PythonParser):
                    ('pos_arg ' * args_pos) +
                    ('kwarg ' * args_kw) + kw + token.type)
            self.add_unique_rule(rule, token.type, uniq_param, customize)

-        rule = ('call_function ::= expr ' +
-                ('pos_arg ' * args_pos) +
-                ('kwarg ' * args_kw) +
-                'expr ' * nak + token.type)
+        if self.version >= 3.6 and opname == 'CALL_FUNCTION_EX_KW':
+            rule = ('call_function36 ::= '
+                    'expr build_tuple_unpack_with_call build_map_unpack_with_call '
+                    'CALL_FUNCTION_EX_KW_1')
+            self.add_unique_rule(rule, token.type, uniq_param, customize)
+            rule = 'call_function ::= call_function36'
+        else:
+            rule = ('call_function ::= expr ' +
+                    ('pos_arg ' * args_pos) +
+                    ('kwarg ' * args_kw) +
+                    'expr ' * nak + token.type)

        self.add_unique_rule(rule, token.type, uniq_param, customize)
        if self.version >= 3.5:
@@ -610,9 +616,9 @@ class Python3Parser(PythonParser):
                    assign2_pypy ::= expr expr designator designator
                    """, nop_func)
                continue
-            elif opname in ('CALL_FUNCTION', 'CALL_FUNCTION_VAR',
-                            'CALL_FUNCTION_VAR_KW') \
-                    or opname.startswith('CALL_FUNCTION_KW'):
+            elif (opname in ('CALL_FUNCTION', 'CALL_FUNCTION_VAR',
+                             'CALL_FUNCTION_VAR_KW', 'CALL_FUNCTION_EX_KW')
+                  or opname.startswith('CALL_FUNCTION_KW')):
                self.custom_classfunc_rule(opname, token, customize)
            elif opname == 'LOAD_DICTCOMP':
                rule_pat = ("dictcomp ::= LOAD_DICTCOMP %sMAKE_FUNCTION_0 expr "
@@ -633,6 +639,18 @@ class Python3Parser(PythonParser):
                self.add_unique_rule(rule, opname, token.attr, customize)
                rule = 'expr ::= build_list_unpack'
                self.add_unique_rule(rule, opname, token.attr, customize)
            elif opname.startswith('BUILD_TUPLE_UNPACK_WITH_CALL'):
                v = token.attr
                rule = ('build_tuple_unpack_with_call ::= ' + 'expr1024 ' * int(v//1024) +
                        'expr32 ' * int((v//32) % 32) +
                        'expr ' * (v % 32) + opname)
                self.add_unique_rule(rule, opname, token.attr, customize)
            elif opname.startswith('BUILD_MAP_UNPACK_WITH_CALL'):
                v = token.attr
                rule = ('build_map_unpack_with_call ::= ' + 'expr1024 ' * int(v//1024) +
                        'expr32 ' * int((v//32) % 32) +
                        'expr ' * (v % 32) + opname)
                self.add_unique_rule(rule, opname, token.attr, customize)
            elif opname_base in ('BUILD_LIST', 'BUILD_TUPLE', 'BUILD_SET'):
                v = token.attr
                rule = ('build_list ::= ' + 'expr1024 ' * int(v//1024) +
@@ -642,7 +660,10 @@ class Python3Parser(PythonParser):
                if opname_base == 'BUILD_TUPLE':
                    rule = ('load_closure ::= %s%s' % (('LOAD_CLOSURE ' * v), opname))
                    self.add_unique_rule(rule, opname, token.attr, customize)

                    rule = ('build_tuple ::= ' + 'expr1024 ' * int(v//1024) +
                            'expr32 ' * int((v//32) % 32) +
                            'expr ' * (v % 32) + opname)
                self.add_unique_rule(rule, opname, token.attr, customize)
            elif opname == 'LOOKUP_METHOD':
                # A PyPy speciality - DRY with parse2
                self.add_unique_rule("load_attr ::= expr LOOKUP_METHOD",
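The new BUILD_*_UNPACK_WITH_CALL rules above chunk a collection's elements into groups of 1024 and 32 (expr1024/expr32) so the grammar rule string stays short even for large argument lists. A standalone rendering of that arithmetic; the helper name and the sample opname with its element-count suffix are mine, for illustration only:

    def build_unpack_rule(lhs, opname, v):
        # Same grouping expression as in the parse3.py hunk above.
        return (lhs + ' ::= ' +
                'expr1024 ' * (v // 1024) +
                'expr32 ' * ((v // 32) % 32) +
                'expr ' * (v % 32) + opname)

    # 70 elements -> two expr32 groups plus six plain exprs.
    print(build_unpack_rule('build_tuple_unpack_with_call',
                            'BUILD_TUPLE_UNPACK_WITH_CALL_70', 70))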
@@ -25,12 +25,13 @@ class Python36Parser(Python35Parser):

        func_args36 ::= expr BUILD_TUPLE_0
-        call_function ::= func_args36 unmapexpr CALL_FUNCTION_EX
+        call_function ::= func_args36 build_map_unpack_with_call CALL_FUNCTION_EX_KW_1

        withstmt ::= expr SETUP_WITH POP_TOP suite_stmts_opt POP_BLOCK LOAD_CONST
                     WITH_CLEANUP_START WITH_CLEANUP_FINISH END_FINALLY

        call_function ::= expr expr CALL_FUNCTION_EX
        call_function ::= expr expr expr CALL_FUNCTION_EX_KW
        call_function ::= expr expr expr CALL_FUNCTION_EX_KW_1
        """

    def add_custom_rules(self, tokens, customize):
@@ -16,6 +16,8 @@ import sys

from uncompyle6 import PYTHON3, IS_PYPY
from uncompyle6.scanners.tok import Token
from xdis.bytecode import op_size
from xdis.magics import py_str2float

# The byte code versions we support
PYTHON_VERSIONS = (1.5,
@@ -88,7 +90,7 @@ class Scanner(object):
        if op is None:
            op = self.code[pos]
        target = self.get_argument(pos)
-        if op in self.opc.hasjrel:
+        if op in self.opc.JREL_OPS:
            target += pos + 3
        return target

@@ -99,7 +101,7 @@ class Scanner(object):
    def print_bytecode(self):
        for i in self.op_range(0, len(self.code)):
            op = self.code[i]
-            if op in self.opc.hasjabs+self.opc.hasjrel:
+            if op in self.JUMP_OPs:
                dest = self.get_target(i, op)
                print('%i\t%s\t%i' % (i, self.opname[op], dest))
            else:
@@ -214,9 +216,6 @@ class Scanner(object):
            result.append(offset)
        return result

-    def op_hasArgument(self, op):
-        return self.op_size(op) > 1
-
    def op_range(self, start, end):
        """
        Iterate through positions of opcodes, skipping
@@ -224,20 +223,7 @@
        """
        while start < end:
            yield start
-            start += self.op_size(self.code[start])
-
-    def next_offset(self, op, offset):
-        return offset + self.op_size(op)
-
-    def op_size(self, op):
-        """
-        Return size of operator with its arguments
-        for given opcode <op>.
-        """
-        if op < self.opc.HAVE_ARGUMENT:
-            return 2 if self.version >= 3.6 else 1
-        else:
-            return 2 if self.version >= 3.6 else 3
+            start += op_size(self.code[start], self.opc)

    def remove_mid_line_ifs(self, ifs):
        """
@@ -269,13 +255,16 @@ class Scanner(object):
        self.Token = tokenClass
        return self.Token

def op_has_argument(op, opc):
    return op >= opc.HAVE_ARGUMENT

def parse_fn_counts(argc):
    return ((argc & 0xFF), (argc >> 8) & 0xFF, (argc >> 16) & 0x7FFF)


def get_scanner(version, is_pypy=False, show_asm=None):

    # If version is a string, turn that into the corresponding float.
    if isinstance(version, str):
        version = py_str2float(version)

    # Pick up appropriate scanner
    if version in PYTHON_VERSIONS:
        v_str = "%s" % (int(version * 10))
@@ -302,5 +291,6 @@ def get_scanner(version, is_pypy=False, show_asm=None):
if __name__ == "__main__":
    import inspect, uncompyle6
    co = inspect.currentframe().f_code
-    scanner = get_scanner('2.7.13', True)
+    scanner = get_scanner(uncompyle6.PYTHON_VERSION, IS_PYPY, True)
    tokens, customize = scanner.ingest(co, {})
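parse_fn_counts() above unpacks the MAKE_FUNCTION argument into three counts. A small, self-contained demo of the bit layout; the labels in the comment follow CPython's documented MAKE_FUNCTION encoding and should be treated as an assumption here:

    def parse_fn_counts(argc):
        # Same expression as the new helper above: low byte = positional defaults,
        # next byte = keyword-only defaults, high bits = annotation count.
        return ((argc & 0xFF), (argc >> 8) & 0xFF, (argc >> 16) & 0x7FFF)

    print(parse_fn_counts(0x020103))   # -> (3, 1, 2)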
@@ -1,22 +1,18 @@
-# Copyright (c) 2016 by Rocky Bernstein
+# Copyright (c) 2017 by Rocky Bernstein
"""
-Python PyPy 3.2 bytecode scanner/deparser
+Python PyPy 3.2 decompiler scanner.

-This overlaps Python's 3.2's dis module, but it can be run from
-Python 3 and other versions of Python. Also, we save token
-information for later use in deparsing.
+Does some additional massaging of xdis-disassembled instructions to
+make things easier for decompilation.
"""

import uncompyle6.scanners.scanner32 as scan

# bytecode verification, verify(), uses JUMP_OPs from here
-from xdis.opcodes import opcode_32 as opc # is this rgith?
+from xdis.opcodes import opcode_32 as opc # is this right?
JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)

-# We base this off of 2.6 instead of the other way around
-# because we cleaned things up this way.
-# The history is that 2.7 support is the cleanest,
-# then from that we got 2.6 and so on.
+# We base this off of 3.2
class ScannerPyPy32(scan.Scanner32):
    def __init__(self, show_asm):
        # There are no differences in initialization between
22 uncompyle6/scanners/pypy35.py (new file)
@@ -0,0 +1,22 @@
# Copyright (c) 2017 by Rocky Bernstein
"""
Python PyPy 3.2 decompiler scanner.

Does some additional massaging of xdis-disassembled instructions to
make things easier for decompilation.
"""

import uncompyle6.scanners.scanner35 as scan

# bytecode verification, verify(), uses JUMP_OPs from here
from xdis.opcodes import opcode_35 as opc # is this right?
JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)

# We base this off of 3.5
class ScannerPyPy35(scan.Scanner35):
    def __init__(self, show_asm):
        # There are no differences in initialization between
        # pypy 3.5 and 3.5
        scan.Scanner35.__init__(self, show_asm, is_pypy=True)
        self.version = 3.5
        return
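A hedged usage sketch for the new PyPy 3.5 scanner: get_scanner() (shown in the uncompyle6/scanner.py hunk above) takes an is_pypy flag, so something like the following should hand back the class added in this file. The exact dispatch logic is not visible in this diff, so treat the expected class name as an assumption:

    from uncompyle6.scanner import get_scanner

    scanner = get_scanner(3.5, is_pypy=True)
    print(type(scanner).__name__)   # expected: ScannerPyPy35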
@@ -1,6 +1,6 @@
-# Copyright (c) 2016 by Rocky Bernstein
+# Copyright (c) 2016-2017 by Rocky Bernstein
"""
-Python 1.5 bytecode scanner/deparser
+Python 1.5 bytecode decompiler scanner.

This massages tokenized 1.5 bytecode to make it more amenable for
grammar parsing.
@@ -1,4 +1,4 @@
-# Copyright (c) 2015, 2016 by Rocky Bernstein
+# Copyright (c) 2015-2017 by Rocky Bernstein
# Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
"""
@@ -25,14 +25,15 @@ from __future__ import print_function
from collections import namedtuple
from array import array

-from uncompyle6.scanner import op_has_argument
+from uncompyle6.scanner import L65536
from xdis.code import iscode
from xdis.bytecode import op_has_argument, op_size

-import uncompyle6.scanner as scan
+from uncompyle6.scanner import Scanner

-class Scanner2(scan.Scanner):
+class Scanner2(Scanner):
    def __init__(self, version, show_asm=None, is_pypy=False):
-        scan.Scanner.__init__(self, version, show_asm, is_pypy)
+        Scanner.__init__(self, version, show_asm, is_pypy)
        self.pop_jump_if = frozenset([self.opc.PJIF, self.opc.PJIT])
        self.jump_forward = frozenset([self.opc.JUMP_ABSOLUTE, self.opc.JUMP_FORWARD])
        # This is the 2.5+ default
@@ -186,9 +187,9 @@ class Scanner2(scan.Scanner):
            oparg = self.get_argument(offset) + extended_arg
            extended_arg = 0
            if op == self.opc.EXTENDED_ARG:
-                extended_arg = oparg * scan.L65536
+                extended_arg = oparg * L65536
                continue
-            if op in self.opc.hasconst:
+            if op in self.opc.CONST_OPS:
                const = co.co_consts[oparg]
                if iscode(const):
                    oparg = const
@@ -209,23 +210,23 @@ class Scanner2(scan.Scanner):
                        pattr = '<code_object ' + const.co_name + '>'
                else:
                    pattr = const
-            elif op in self.opc.hasname:
+            elif op in self.opc.NAME_OPS:
                pattr = names[oparg]
-            elif op in self.opc.hasjrel:
+            elif op in self.opc.JREL_OPS:
                # use instead: hasattr(self, 'patch_continue'): ?
                if self.version == 2.7:
                    self.patch_continue(tokens, offset, op)
                pattr = repr(offset + 3 + oparg)
-            elif op in self.opc.hasjabs:
+            elif op in self.opc.JABS_OPS:
                # use instead: hasattr(self, 'patch_continue'): ?
                if self.version == 2.7:
                    self.patch_continue(tokens, offset, op)
                pattr = repr(oparg)
-            elif op in self.opc.haslocal:
+            elif op in self.opc.LOCAL_OPS:
                pattr = varnames[oparg]
-            elif op in self.opc.hascompare:
+            elif op in self.opc.COMPARE_OPS:
                pattr = self.opc.cmp_op[oparg]
-            elif op in self.opc.hasfree:
+            elif op in self.opc.FREE_OPS:
                pattr = free[oparg]

            if op in self.varargs_ops:
@@ -327,7 +328,7 @@ class Scanner2(scan.Scanner):
        for i in self.op_range(0, n):
            op = self.code[i]
            self.prev.append(i)
-            if self.op_hasArgument(op):
+            if op_has_argument(op, self.opc):
                self.prev.append(i)
                self.prev.append(i)
                pass
@@ -380,7 +381,7 @@ class Scanner2(scan.Scanner):
                if elem != code[i]:
                    match = False
                    break
-                i += self.op_size(code[i])
+                i += op_size(code[i], self.opc)

            if match:
                i = self.prev[i]
@@ -451,7 +452,7 @@ class Scanner2(scan.Scanner):
            self.not_continue.add(jmp)
            jmp = self.get_target(jmp)
            prev_offset = self.prev[except_match]
-            # COMPARE_OP argument should be "exception match" or 10
+            # COMPARE_OP argument should be "exception-match" or 10
            if (self.code[prev_offset] == self.opc.COMPARE_OP and
                self.code[prev_offset+1] != 10):
                return None
@@ -602,7 +603,7 @@ class Scanner2(scan.Scanner):

            if test == offset:
                loop_type = 'while 1'
-            elif self.code[test] in self.opc.hasjabs + self.opc.hasjrel:
+            elif self.code[test] in self.opc.JUMP_OPs:
                self.ignore_if.add(test)
                test_target = self.get_target(test)
                if test_target > (jump_back+3):
@@ -617,7 +618,7 @@ class Scanner2(scan.Scanner):
                               'start': jump_back+3,
                               'end': end})
        elif op == self.opc.SETUP_EXCEPT:
-            start = offset + self.op_size(op)
+            start = offset + op_size(op, self.opc)
            target = self.get_target(offset, op)
            end = self.restrict_to_parent(target, parent)
            if target != end:
@@ -641,7 +642,7 @@ class Scanner2(scan.Scanner):
                    setup_except_nest -= 1
                elif self.code[end_finally_offset] == self.opc.SETUP_EXCEPT:
                    setup_except_nest += 1
-                end_finally_offset += self.op_size(code[end_finally_offset])
+                end_finally_offset += op_size(code[end_finally_offset], self.opc)
                pass

            # Add the except blocks
@@ -842,7 +843,7 @@ class Scanner2(scan.Scanner):
            else:
                # We still have the case in 2.7 that the next instruction
                # is a jump to a SETUP_LOOP target.
-                next_offset = target + self.op_size(self.code[target])
+                next_offset = target + op_size(self.code[target], self.opc)
                next_op = self.code[next_offset]
                if self.op_name(next_op) == 'JUMP_FORWARD':
                    jump_target = self.get_target(next_offset, next_op)
@@ -904,7 +905,9 @@ class Scanner2(scan.Scanner):
                                   'start': start-3,
                                   'end': pre_rtarget})

-            self.not_continue.add(pre_rtarget)
+            # FIXME: this is yet another case were we need dominators.
+            if pre_rtarget not in self.linestartoffsets or self.version < 2.7:
+                self.not_continue.add(pre_rtarget)

            if rtarget < end:
                # We have an "else" block of some kind.
@@ -991,11 +994,11 @@ class Scanner2(scan.Scanner):
            oparg = self.get_argument(offset)

            if label is None:
-                if op in self.opc.hasjrel and self.op_name(op) != 'FOR_ITER':
-                    # if (op in self.opc.hasjrel and
+                if op in self.opc.JREL_OPS and self.op_name(op) != 'FOR_ITER':
+                    # if (op in self.opc.JREL_OPS and
                    #    (self.version < 2.0 or op != self.opc.FOR_ITER)):
                    label = offset + 3 + oparg
-                elif self.version == 2.7 and op in self.opc.hasjabs:
+                elif self.version == 2.7 and op in self.opc.JABS_OPS:
                    if op in (self.opc.JUMP_IF_FALSE_OR_POP,
                              self.opc.JUMP_IF_TRUE_OR_POP):
                        if (oparg > offset):
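The scan.L65536 -> L65536 change above only touches where the constant is imported from; the EXTENDED_ARG math itself is unchanged. A tiny standalone illustration, assuming L65536 == 2**16 as its name suggests:

    L65536 = 65536  # assumed value of uncompyle6.scanner.L65536

    def full_argument(extended_oparg, oparg):
        # EXTENDED_ARG carries the high 16 bits of the following opcode's argument.
        return extended_oparg * L65536 + oparg

    print(full_argument(1, 2))   # 65538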
@@ -1,4 +1,4 @@
-# Copyright (c) 2015, 2016 by Rocky Bernstein
+# Copyright (c) 2015-2017 by Rocky Bernstein
# Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
"""
@@ -15,6 +15,7 @@ if PYTHON3:
    intern = sys.intern

import uncompyle6.scanners.scanner2 as scan
from uncompyle6.scanner import L65536

# bytecode verification, verify(), uses JUMP_OPs from here
from xdis.opcodes import opcode_26
@@ -178,9 +179,9 @@ class Scanner26(scan.Scanner2):
            oparg = self.get_argument(offset) + extended_arg
            extended_arg = 0
            if op == self.opc.EXTENDED_ARG:
-                extended_arg = oparg * scan.L65536
+                extended_arg = oparg * L65536
                continue
-            if op in self.opc.hasconst:
+            if op in self.opc.CONST_OPS:
                const = co.co_consts[oparg]
                # We can't use inspect.iscode() because we may be
                # using a different version of Python than the
@@ -205,9 +206,9 @@ class Scanner26(scan.Scanner2):
                        pattr = '<code_object ' + const.co_name + '>'
                else:
                    pattr = const
-            elif op in self.opc.hasname:
+            elif op in self.opc.NAME_OPS:
                pattr = names[oparg]
-            elif op in self.opc.hasjrel:
+            elif op in self.opc.JREL_OPS:
                pattr = repr(offset + 3 + oparg)
                if op == self.opc.JUMP_FORWARD:
                    target = self.get_target(offset)
@@ -217,13 +218,13 @@ class Scanner26(scan.Scanner2):
                    if len(tokens) and tokens[-1].type == 'JUMP_BACK':
                        tokens[-1].type = intern('CONTINUE')

-            elif op in self.opc.hasjabs:
+            elif op in self.opc.JABS_OPS:
                pattr = repr(oparg)
-            elif op in self.opc.haslocal:
+            elif op in self.opc.LOCAL_OPS:
                pattr = varnames[oparg]
-            elif op in self.opc.hascompare:
+            elif op in self.opc.COMPARE_OPS:
                pattr = self.opc.cmp_op[oparg]
-            elif op in self.opc.hasfree:
+            elif op in self.opc.FREE_OPS:
                pattr = free[oparg]
            if op in self.varargs_ops:
                # CE - Hack for >= 2.5
@@ -25,10 +25,11 @@ from __future__ import print_function
from collections import namedtuple
from array import array

-from uncompyle6.scanner import Scanner, op_has_argument
+from uncompyle6.scanner import Scanner
from xdis.code import iscode
-from xdis.bytecode import Bytecode
+from xdis.bytecode import Bytecode, op_has_argument, op_size
from uncompyle6.scanner import Token, parse_fn_counts
import xdis

# Get all the opcodes into globals
import xdis.opcodes.opcode_33 as op3
@@ -468,7 +469,7 @@ class Scanner3(Scanner):
        self.prev = self.prev_op = [0]
        for offset in self.op_range(0, codelen):
            op = code[offset]
-            for _ in range(self.op_size(op)):
+            for _ in range(op_size(op, self.opc)):
                self.prev_op.append(offset)

    def find_jump_targets(self, debug):
@@ -518,7 +519,7 @@ class Scanner3(Scanner):
                oparg = code[offset+1]
            else:
                oparg = code[offset+1] + code[offset+2] * 256
-            next_offset = self.next_offset(op, offset)
+            next_offset = xdis.next_offset(op, self.opc, offset)

            if label is None:
                if op in op3.hasjrel and op != self.opc.FOR_ITER:
@@ -564,7 +565,7 @@ class Scanner3(Scanner):
                if elem != code[i]:
                    match = False
                    break
-                i += self.op_size(code[i])
+                i += op_size(code[i], self.opc)

            if match is True:
                i = self.prev_op[i]
@@ -632,11 +633,11 @@ class Scanner3(Scanner):
        rel_offset = 0
        if self.version >= 3.6:
            target = self.code[offset+1]
-            if op in self.opc.hasjrel:
+            if op in self.opc.JREL_OPS:
                rel_offset = offset + 2
        else:
            target = self.code[offset+1] + self.code[offset+2] * 256
-            if op in self.opc.hasjrel:
+            if op in self.opc.JREL_OPS:
                rel_offset = offset + 3
                pass
            pass
@@ -757,7 +758,7 @@ class Scanner3(Scanner):
                               'start': jump_back+3,
                               'end': end})
        elif op in self.pop_jump_tf:
-            start = offset + self.op_size(op)
+            start = offset + op_size(op, self.opc)
            target = self.get_target(offset)
            rtarget = self.restrict_to_parent(target, parent)
            prev_op = self.prev_op
@@ -920,7 +921,7 @@ class Scanner3(Scanner):
                # except block return
                jump_prev = prev_op[offset]
                if self.is_pypy and code[jump_prev] == self.opc.COMPARE_OP:
-                    if self.opc.cmp_op[code[jump_prev+1]] == 'exception match':
+                    if self.opc.cmp_op[code[jump_prev+1]] == 'exception-match':
                        return
                if self.version >= 3.5:
                    # Python 3.5 may remove as dead code a JUMP
@@ -932,9 +933,9 @@ class Scanner3(Scanner):
                    # not from SETUP_EXCEPT
                    next_op = rtarget
                    if code[next_op] == self.opc.POP_BLOCK:
-                        next_op += self.op_size(self.code[next_op])
+                        next_op += op_size(self.code[next_op], self.opc)
                    if code[next_op] == self.opc.JUMP_ABSOLUTE:
-                        next_op += self.op_size(self.code[next_op])
+                        next_op += op_size(self.code[next_op], self.opc)
                    if next_op in targets:
                        for try_op in targets[next_op]:
                            come_from_op = code[try_op]
@@ -957,12 +958,12 @@ class Scanner3(Scanner):
            end = self.restrict_to_parent(target, parent)
            self.fixed_jumps[offset] = end
        elif op == self.opc.POP_EXCEPT:
-            next_offset = self.next_offset(op, offset)
+            next_offset = xdis.next_offset(op, self.opc, offset)
            target = self.get_target(next_offset)
            if target > next_offset:
                next_op = code[next_offset]
                if (self.opc.JUMP_ABSOLUTE == next_op and
-                    END_FINALLY != code[self.next_offset(next_op, next_offset)]):
+                    END_FINALLY != code[xdis.next_offset(next_op, self.opc, next_offset)]):
                    self.fixed_jumps[next_offset] = target
                    self.except_targets[target] = next_offset
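Several hunks above replace self.op_size()/self.next_offset() with xdis's op_size()/next_offset(). The removed Scanner.op_size() (see the uncompyle6/scanner.py hunk) encodes the size rule those helpers implement. A minimal re-implementation of that rule plus an offset walk, using assumed opcode values for the demo:

    HAVE_ARGUMENT = 90   # assumed CPython threshold, for the demo only

    def instruction_size(op, version=2.7):
        # Pre-3.6 bytecode: 1 byte without an argument, 3 bytes with one.
        # 3.6+ wordcode: every instruction is 2 bytes.
        if version >= 3.6:
            return 2
        return 3 if op >= HAVE_ARGUMENT else 1

    def walk_offsets(code_bytes, version=2.7):
        offset = 0
        while offset < len(code_bytes):
            yield offset
            offset += instruction_size(code_bytes[offset], version)

    # 100 = LOAD_CONST (takes an argument), 83 = RETURN_VALUE (no argument) -- assumed values.
    print(list(walk_offsets(bytes([100, 0, 0, 83]))))   # [0, 3]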
@@ -10,6 +10,8 @@ from __future__ import print_function

# bytecode verification, verify(), uses JUMP_OPs from here
from xdis.opcodes import opcode_30 as opc
from xdis.bytecode import op_size

JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)

JUMP_TF = frozenset([opc.JUMP_IF_FALSE, opc.JUMP_IF_TRUE])
@@ -118,7 +120,7 @@ class Scanner30(Scanner3):

            if test == offset:
                loop_type = 'while 1'
-            elif self.code[test] in opc.hasjabs+opc.hasjrel:
+            elif self.code[test] in opc.JUMP_OPs:
                self.ignore_if.add(test)
                test_target = self.get_target(test)
                if test_target > (jump_back+3):
@@ -133,7 +135,7 @@ class Scanner30(Scanner3):
                               'start': jump_back+3,
                               'end': end})
        elif op in JUMP_TF:
-            start = offset + self.op_size(op)
+            start = offset + op_size(op, self.opc)
            target = self.get_target(offset)
            rtarget = self.restrict_to_parent(target, parent)
            prev_op = self.prev_op
@@ -293,7 +295,7 @@ class Scanner30(Scanner3):
                # except block return
                jump_prev = prev_op[offset]
                if self.is_pypy and code[jump_prev] == self.opc.COMPARE_OP:
-                    if self.opc.cmp_op[code[jump_prev+1]] == 'exception match':
+                    if self.opc.cmp_op[code[jump_prev+1]] == 'exception-match':
                        return
                if self.version >= 3.5:
                    # Python 3.5 may remove as dead code a JUMP
@@ -305,9 +307,9 @@ class Scanner30(Scanner3):
                    # not from SETUP_EXCEPT
                    next_op = rtarget
                    if code[next_op] == self.opc.POP_BLOCK:
-                        next_op += self.op_size(self.code[next_op])
+                        next_op += op_size(self.code[next_op], self.opc)
                    if code[next_op] == self.opc.JUMP_ABSOLUTE:
-                        next_op += self.op_size(self.code[next_op])
+                        next_op += op_size(self.code[next_op], self.opc)
                    if next_op in targets:
                        for try_op in targets[next_op]:
                            come_from_op = code[try_op]
@@ -1,6 +1,9 @@
-# Copyright (c) 2015-2016 by Rocky Bernstein
+# Copyright (c) 2015-2017 by Rocky Bernstein
"""
-Python 3.2 bytecode scanner/deparser
+Python 3.2 bytecode decompiler scanner.

Does some additional massaging of xdis-disassembled instructions to
make things easier for decompilation.

This sets up opcodes Python's 3.2 and calls a generalized
scanner routine for Python 3.
@@ -1,6 +1,9 @@
-# Copyright (c) 2015-2016 by Rocky Bernstein
+# Copyright (c) 2015-2017 by Rocky Bernstein
"""
-Python 3.4 bytecode scanner/deparser
+Python 3.4 bytecode decompiler scanner

Does some additional massaging of xdis-disassembled instructions to
make things easier for decompilation.

This sets up opcodes Python's 3.4 and calls a generalized
scanner routine for Python 3.
@@ -1,6 +1,9 @@
-# Copyright (c) 2016 by Rocky Bernstein
+# Copyright (c) 2017 by Rocky Bernstein
"""
-Python 3.5 bytecode scanner/deparser
+Python 3.5 bytecode decompiler scanner

Does some additional massaging of xdis-disassembled instructions to
make things easier for decompilation.

This sets up opcodes Python's 3.5 and calls a generalized
scanner routine for Python 3.
@@ -16,8 +19,8 @@ JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)

class Scanner35(Scanner3):

-    def __init__(self, show_asm=None):
-        Scanner3.__init__(self, 3.5, show_asm)
+    def __init__(self, show_asm=None, is_pypy=False):
+        Scanner3.__init__(self, 3.5, show_asm, is_pypy)
        return
    pass
@@ -1,6 +1,9 @@
-# Copyright (c) 2016 by Rocky Bernstein
+# Copyright (c) 2016-2017 by Rocky Bernstein
"""
-Python 3.6 bytecode scanner/deparser
+Python 3.6 bytecode decompiler scanner

Does some additional massaging of xdis-disassembled instructions to
make things easier for decompilation.

This sets up opcodes Python's 3.6 and calls a generalized
scanner routine for Python 3.
@@ -28,8 +31,12 @@ class Scanner36(Scanner3):
            if t.op == self.opc.CALL_FUNCTION_EX and t.attr & 1:
                t.type = 'CALL_FUNCTION_EX_KW'
                pass
-            if t.op == self.opc.CALL_FUNCTION_KW:
+            elif t.op == self.opc.CALL_FUNCTION_KW:
                t.type = 'CALL_FUNCTION_KW_{t.attr}'.format(**locals())
            elif t.op == self.opc.BUILD_TUPLE_UNPACK_WITH_CALL:
                t.type = 'BUILD_TUPLE_UNPACK_WITH_CALL_%d' % t.attr
            elif t.op == self.opc.BUILD_MAP_UNPACK_WITH_CALL:
                t.type = 'BUILD_MAP_UNPACK_WITH_CALL_%d' % t.attr
            pass
        return tokens, customize
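Scanner36.ingest() above specializes generic 3.6 tokens so the grammar can reference the argument count directly. A standalone sketch of that renaming; the helper name is mine, and the real code rewrites Token.type in place rather than returning a new name:

    def specialize_36_token(opname, attr):
        if opname == 'CALL_FUNCTION_EX' and attr & 1:
            return 'CALL_FUNCTION_EX_KW'
        elif opname == 'CALL_FUNCTION_KW':
            return 'CALL_FUNCTION_KW_%d' % attr
        elif opname == 'BUILD_TUPLE_UNPACK_WITH_CALL':
            return 'BUILD_TUPLE_UNPACK_WITH_CALL_%d' % attr
        elif opname == 'BUILD_MAP_UNPACK_WITH_CALL':
            return 'BUILD_MAP_UNPACK_WITH_CALL_%d' % attr
        return opname

    print(specialize_36_token('CALL_FUNCTION_KW', 3))   # CALL_FUNCTION_KW_3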
@@ -65,10 +65,10 @@ class Token:
        if self.pattr:
            pattr = self.pattr
            if self.opc:
-                if self.op in self.opc.hasjrel:
+                if self.op in self.opc.JREL_OPS:
                    if not self.pattr.startswith('to '):
                        pattr = "to " + self.pattr
-                elif self.op in self.opc.hasjabs:
+                elif self.op in self.opc.JABS_OPS:
                    self.pattr= str(self.pattr)
                    if not self.pattr.startswith('to '):
                        pattr = "to " + str(self.pattr)
@@ -176,7 +176,7 @@ TABLE_DIRECT = {
    'ret_cond_not': ( '%p if not %p else %p', (2, 27), (0, 22), (-1, 27)),
    'conditional_lambda': ( '(%c if %c else %c)', 2, 0, 3),
    'return_lambda': ('%c', 0),
-    'compare': ( '%p %[-1]{pattr} %p', (0, 19), (1, 19) ),
+    'compare': ( '%p %[-1]{pattr.replace("-", " ")} %p', (0, 19), (1, 19) ),
    'cmp_list': ( '%p %p', (0, 29), (1, 30)),
    'cmp_list1': ( '%[3]{pattr} %p %p', (0, 19), (-2, 19)),
    'cmp_list2': ( '%[1]{pattr} %p', (0, 19)),
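The consts.py change above swaps pattr for pattr.replace("-", " ") because xdis 3.5 renamed COMPARE_OP operands such as "is not" and "exception match" to hyphenated forms (see the NEWS and ChangeLog entries in this compare). The replace() call maps them back to valid Python source text:

    for pattr in ('is-not', 'exception-match', '=='):
        print(pattr.replace("-", " "))
    # is not
    # exception match
    # ==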
@@ -44,7 +44,7 @@ do it recursively which is where offsets are probably located.

For example in:
'importmultiple': ( '%|import%b %c%c\n', 0, 2, 3 ),

n
The node position 0 will be associated with "import".

"""

@@ -77,11 +77,12 @@ from uncompyle6.semantics.consts import (
)

from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
from spark_parser.ast import GenericASTTraversalPruningException

from collections import namedtuple
NodeInfo = namedtuple("NodeInfo", "node start finish")
ExtractInfo = namedtuple("ExtractInfo",
"lineNo lineStartOffset markerLine selectedLine selectedText")
"lineNo lineStartOffset markerLine selectedLine selectedText nonterminal")

TABLE_DIRECT_FRAGMENT = {
'break_stmt': ( '%|%rbreak\n', ),

@@ -159,8 +160,9 @@ class FragmentsWalker(pysource.SourceWalker, object):
def set_pos_info(self, node, start, finish, name=None):
if name is None: name = self.name
if hasattr(node, 'offset'):
self.offsets[name, node.offset] = \
NodeInfo(node = node, start = start, finish = finish)
node.start = start
node.finish = finish
self.offsets[name, node.offset] = node

if hasattr(node, 'parent'):
assert node.parent != node
@@ -176,6 +178,34 @@ class FragmentsWalker(pysource.SourceWalker, object):

return

def table_r_node(self, node):
"""General pattern where the last node should
get the text span attributes of the entire tree"""
start = len(self.f.getvalue())
try:
self.default(node)
except GenericASTTraversalPruningException:
final = len(self.f.getvalue())
self.set_pos_info(node, start, final)
self.set_pos_info(node[-1], start, final)
raise GenericASTTraversalPruningException

n_slice0 = n_slice1 = n_slice2 = n_slice3 = n_binary_subscr = table_r_node
n_augassign_1 = n_print_item = exec_stmt = print_to_item = del_stmt = table_r_node
n_classdefco1 = n_classdefco2 = except_cond1 = except_cond2 = table_r_node

def n_passtmt(self, node):
start = len(self.f.getvalue()) + len(self.indent)
self.set_pos_info(node, start, start+len("pass"))
self.default(node)

def n_trystmt(self, node):
start = len(self.f.getvalue()) + len(self.indent)
self.set_pos_info(node[0], start, start+len("try:"))
self.default(node)

n_tryelsestmt = n_tryelsestmtc = n_tryelsestmtl = n_tryfinallystmt = n_trystmt

def n_return_stmt(self, node):
start = len(self.f.getvalue()) + len(self.indent)
if self.params['isLambda']:
@@ -229,6 +259,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
self.write(' ')
node[0].parent = node
self.preorder(node[0])
self.set_pos_info(node[-1], start, len(self.f.getvalue()))
self.set_pos_info(node, start, len(self.f.getvalue()))
self.prune() # stop recursing

@@ -366,6 +397,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
self.write(sep); sep = ", "
self.preorder(subnode)
self.set_pos_info(node, start, len(self.f.getvalue()))
self.set_pos_info(node[-1], start, len(self.f.getvalue()))
self.println()
self.prune() # stop recursing
@@ -1089,7 +1121,6 @@ class FragmentsWalker(pysource.SourceWalker, object):
def traverse(self, node, indent=None, isLambda=False):
'''Builds up a fragment which can be used inside a larger
block of code'''

self.param_stack.append(self.params)
if indent is None: indent = self.indent
p = self.pending_newlines
@@ -1184,16 +1215,38 @@ class FragmentsWalker(pysource.SourceWalker, object):

if elided: selectedLine += ' ...'

if isinstance(nodeInfo, Token):
nodeInfo = nodeInfo.parent
else:
nodeInfo = nodeInfo

if isinstance(nodeInfo, AST):
nonterminal = nodeInfo[0]
else:
nonterminal = nodeInfo.node

return ExtractInfo(lineNo = len(lines), lineStartOffset = lineStart,
markerLine = markerLine,
selectedLine = selectedLine,
selectedText = selectedText)
selectedText = selectedText,
nonterminal = nonterminal)

def extract_line_info(self, name, offset):
if (name, offset) not in list(self.offsets.keys()):
return None
return self.extract_node_info(self.offsets[name, offset])

def prev_node(self, node):
prev = None
if not hasattr(node, 'parent'):
return prev
p = node.parent
for n in p:
if node == n:
return prev
prev = n
return prev

def extract_parent_info(self, node):
if not hasattr(node, 'parent'):
return None, None
@@ -1557,25 +1610,14 @@ class FragmentsWalker(pysource.SourceWalker, object):
self.set_pos_info(startnode, startnode_start, fin)

# FIXME rocky: figure out how to get these cases to be table driven.
#
# 1. for loops. For loops have two positions that correspond to a single text
# location. In "for i in ..." there is the initialization "i" code as well
# as the iteration code with "i". A "copy" spec like %X3,3 - copy param
# 3 to param 2 would work
#
# 2. subroutine calls. If the last op is the call, then for purposes of printing
# we don't need to print anything special there. However it encompasses the
# entire string of the node fn(...)
match = re.search(r'^try', startnode.type)
match = re.search(r'^call_function', startnode.type)
if match:
self.set_pos_info(node[0], startnode_start, startnode_start+len("try:"))
self.set_pos_info(node[2], node[3].finish, node[3].finish)
else:
match = re.search(r'^call_function', startnode.type)
if match:
last_node = startnode[-1]
# import traceback; traceback.print_stack()
self.set_pos_info(last_node, startnode_start, self.last_finish)
last_node = startnode[-1]
# import traceback; traceback.print_stack()
self.set_pos_info(last_node, startnode_start, self.last_finish)
return

@classmethod
@@ -1657,6 +1699,13 @@ def deparse_code(version, co, out=StringIO(), showasm=False, showast=False,
if deparsed.ERROR:
raise deparsed.ERROR

# To keep the API consistent with previous releases, convert
# deparse.offset values into NodeInfo items
for tup, node in deparsed.offsets.items():
deparsed.offsets[tup] = NodeInfo(node = node, start = node.start,
finish = node.finish)

deparsed.scanner = scanner
return deparsed

from bisect import bisect_right

@@ -1691,6 +1740,7 @@ def deparse_code_around_offset(name, offset, version, co, out=StringIO(),

if __name__ == '__main__':

from uncompyle6 import IS_PYPY
def deparse_test(co, is_pypy=IS_PYPY):
sys_version = sys.version_info.major + (sys.version_info.minor / 10.0)
walk = deparse_code(sys_version, co, showasm=False, showast=False,
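Taken together, these fragments hunks mean a caller gets back a walker whose offsets dict maps (name, offset) keys to NodeInfo(node, start, finish) spans into the reconstructed source. A rough usage sketch under that assumption; the import path and which (name, offset) keys exist are assumptions that depend on the installed release and on the bytecode being deparsed:

    import sys
    # Import path assumed from the package layout these hunks edit.
    from uncompyle6.semantics.fragments import deparse_code

    def sample(a, b=2):
        return a + b

    version = sys.version_info.major + (sys.version_info.minor / 10.0)
    deparsed = deparse_code(version, sample.__code__, showasm=False, showast=False)

    # offsets values are NodeInfo tuples after deparse_code returns.
    for (name, offset), info in sorted(deparsed.offsets.items(),
                                       key=lambda kv: str(kv[0])):
        print(name, offset, info.start, info.finish)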
@@ -163,6 +163,9 @@ def make_function3_annotate(self, node, isLambda, nested=1,

i = len(paramnames) - len(defparams)
suffix = ''

no_paramnames = len(paramnames[:i]) == 0

for param in paramnames[:i]:
self.write(suffix, param)
suffix = ', '

@@ -182,6 +185,7 @@ def make_function3_annotate(self, node, isLambda, nested=1,
suffix = ', ' if i > 0 else ''
for n in node:
if n == 'pos_arg':
no_paramnames = False
self.write(suffix)
param = paramnames[i]
self.write(param)
@@ -189,7 +193,11 @@ def make_function3_annotate(self, node, isLambda, nested=1,
aa = annotate_args[param]
if isinstance(aa, tuple):
aa = aa[0]
self.write(': "%s"' % aa)
self.write(': "%s"' % aa)
elif isinstance(aa, AST):
self.write(': ')
self.preorder(aa)

self.write('=')
i += 1
self.preorder(n)
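make_function3_annotate is rebuilding annotated signatures: plain-string annotations are written back quoted, while annotation expressions are rendered by walking their AST. For orientation, this is the kind of source being reconstructed, and where CPython keeps the raw data the decompiler has to re-render (plain Python 3, nothing uncompyle6-specific):

    def greet(name: "str", times: int = 1, *, shout: bool = False) -> "str":
        msg = ("Hello, %s! " % name) * times
        return msg.upper() if shout else msg

    # Annotation objects, positional defaults and keyword-only defaults all
    # live on the function object and have to be written back as source text.
    print(greet.__annotations__)     # e.g. {'name': 'str', 'times': <class 'int'>, ...}
    print(greet.__defaults__)        # (1,)
    print(greet.__kwdefaults__)      # {'shout': False}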
@@ -202,64 +210,65 @@ def make_function3_annotate(self, node, isLambda, nested=1,

# self.println(indent, '#flags:\t', int(code.co_flags))
if kw_args + annotate_argc > 0:
if not code_has_star_arg(code):
if argc > 0:
self.write(", *, ")
else:
self.write("*, ")
pass
else:
self.write(", ")

kwargs = node[0]
last = len(kwargs)-1
i = 0
for n in node[0]:
if n == 'kwarg':
if (line_number != self.line_number):
self.write("\n" + indent)
line_number = self.line_number
self.write('%s=' % n[0].pattr)
self.preorder(n[1])
if i < last:
self.write(', ')
i += 1
if no_paramnames:
if not code_has_star_arg(code):
if argc > 0:
self.write(", *, ")
else:
self.write("*, ")
pass
pass
annotate_args = []
for n in node:
if n == 'annotate_arg':
annotate_args.append(n[0])
elif n == 'annotate_tuple':
t = n[0].attr
if t[-1] == 'return':
t = t[0:-1]
annotate_args = annotate_args[:-1]
pass
last = len(annotate_args) - 1
for i in range(len(annotate_args)):
self.write("%s: " % (t[i]))
self.preorder(annotate_args[i])
else:
self.write(", ")

kwargs = node[0]
last = len(kwargs)-1
i = 0
for n in node[0]:
if n == 'kwarg':
if (line_number != self.line_number):
self.write("\n" + indent)
line_number = self.line_number
self.write('%s=' % n[0].pattr)
self.preorder(n[1])
if i < last:
self.write(', ')
pass
i += 1
pass
break
pass
annotate_args = []
for n in node:
if n == 'annotate_arg':
annotate_args.append(n[0])
elif n == 'annotate_tuple':
t = n[0].attr
if t[-1] == 'return':
t = t[0:-1]
annotate_args = annotate_args[:-1]
pass
last = len(annotate_args) - 1
for i in range(len(annotate_args)):
self.write("%s: " % (t[i]))
self.preorder(annotate_args[i])
if i < last:
self.write(', ')
pass
pass
break
pass
pass
pass

if code_has_star_star_arg(code):
if argc > 0:
self.write(', ')
self.write('**%s' % code.co_varnames[argc + kw_pairs])

if code_has_star_star_arg(code):
if argc > 0:
self.write(', ')
self.write('**%s' % code.co_varnames[argc + kw_pairs])

if isLambda:
self.write(": ")
else:
self.write(')')
if 'return' in annotate_tuple[0].attr:
if (line_number != self.line_number):
if (line_number != self.line_number) and not no_paramnames:
self.write("\n" + indent)
line_number = self.line_number
self.write(' -> ')
@@ -402,7 +411,8 @@ def make_function2(self, node, isLambda, nested=1, codeNode=None):
if code_has_star_star_arg(code):
if argc > 0:
self.write(', ')
self.write('**%s' % code.co_varnames[argc + kw_pairs])
if argc + kw_pairs > 0:
self.write('**%s' % code.co_varnames[argc + kw_pairs])

if isLambda:
self.write(": ")
@@ -428,10 +438,34 @@ def make_function2(self, node, isLambda, nested=1, codeNode=None):


def make_function3(self, node, isLambda, nested=1, codeNode=None):
"""Dump function definition, doc string, and function body."""
"""Dump function definition, doc string, and function body in
Python version 3.0 and above
"""

# FIXME: call make_function3 if we are self.version >= 3.0
# and then simplify the below.
# For Python 3.3, the evaluation stack in MAKE_FUNCTION is:

# * default argument objects in positional order
# * pairs of name and default argument, with the name just below
# the object on the stack, for keyword-only parameters
# * parameter annotation objects
# * a tuple listing the parameter names for the annotations
# (only if there are any annotation objects)
# * the code associated with the function (at TOS1)
# * the qualified name of the function (at TOS)

# For Python 3.0 .. 3.2 the evaluation stack is:
# The function object is defined to have argc default parameters,
# which are found below TOS.
# * first come positional args in the order they are given in the source,
# * next come the keyword args in the order they are given in the source,
# * finally is the code associated with the function (at TOS)
#
# Note: There is no qualified name at TOS

# MAKE_CLOSURE adds an additional closure slot

# Thank you, Python, for such a well-thought-out system that has
# changed 4 or so times.

def build_param(ast, name, default):
"""build parameters:
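The comment block above describes what MAKE_FUNCTION expects on the evaluation stack in different Python 3 releases. One way to see a concrete layout is to disassemble a small def with defaults, keyword-only defaults and an annotation; the exact operand encoding differs from version to version, so treat the output as indicative only:

    import dis

    src = "def example(a, b=1, *, c=2, d: int = 3) -> str: pass"
    # The instructions leading up to MAKE_FUNCTION show how defaults,
    # kw-only defaults, annotations and the code/qualname reach the stack
    # on whatever interpreter runs this.
    dis.dis(compile(src, "<demo>", "exec"))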
@@ -451,21 +485,31 @@ def make_function3(self, node, isLambda, nested=1, codeNode=None):
# MAKE_FUNCTION_... or MAKE_CLOSURE_...
assert node[-1].type.startswith('MAKE_')

# Python 3.3+ adds a qualified name at TOS (-1)
# moving down the LOAD_LAMBDA instruction
if 3.0 <= self.version <= 3.2:
lambda_index = -2
elif 3.03 <= self.version:
lambda_index = -3
else:
lambda_index = None

args_node = node[-1]
if isinstance(args_node.attr, tuple):
if self.version <= 3.3 and len(node) > 2 and node[-3] != 'LOAD_LAMBDA':
# positional args are after kwargs
pos_args, kw_args, annotate_argc = args_node.attr
if self.version <= 3.3 and len(node) > 2 and node[lambda_index] != 'LOAD_LAMBDA':
# args are after kwargs; kwargs are bundled as one node
defparams = node[1:args_node.attr[0]+1]
else:
# positional args are before kwargs
# args are before kwargs; kwargs are bundled as one node
defparams = node[:args_node.attr[0]]
pos_args, kw_args, annotate_argc = args_node.attr
else:
if self.version < 3.6:
defparams = node[:args_node.attr]
else:
default, kw, annotate, closure = args_node.attr
# FIXME: start here.
# FIXME: start here for Python 3.6 and above:
defparams = []
# if default:
# defparams = node[-(2 + kw + annotate + closure)]

@@ -475,12 +519,6 @@ def make_function3(self, node, isLambda, nested=1, codeNode=None):
kw_args = 0
pass

if 3.0 <= self.version <= 3.2:
lambda_index = -2
elif 3.03 <= self.version:
lambda_index = -3
else:
lambda_index = None

if lambda_index and isLambda and iscode(node[lambda_index].attr):
assert node[lambda_index].type == 'LOAD_LAMBDA'
@@ -496,7 +534,7 @@ def make_function3(self, node, isLambda, nested=1, codeNode=None):
paramnames = list(code.co_varnames[:argc])

# defaults are for last n parameters, thus reverse
if not 3.0 <= self.version <= 3.2:
if not 3.0 <= self.version <= 3.1:
paramnames.reverse(); defparams.reverse()

try:
@@ -510,58 +548,27 @@ def make_function3(self, node, isLambda, nested=1, codeNode=None):
return

kw_pairs = args_node.attr[1] if self.version >= 3.0 else 0
indent = self.indent

# build parameters
if self.version != 3.2:
params = [build_param(ast, name, default) for
name, default in zip_longest(paramnames, defparams, fillvalue=None)]
params = [build_param(ast, name, d) for
name, d in zip_longest(paramnames, defparams, fillvalue=None)]

if not 3.0 <= self.version <= 3.1:
params.reverse() # back to correct order

if code_has_star_arg(code):
if self.version > 3.0:
params.append('*%s' % code.co_varnames[argc + kw_pairs])
else:
params.append('*%s' % code.co_varnames[argc])
argc += 1

# dump parameter list (with default values)
if isLambda:
self.write("lambda ", ", ".join(params))
if code_has_star_arg(code):
if self.version > 3.0:
params.append('*%s' % code.co_varnames[argc + kw_pairs])
else:
self.write("(", ", ".join(params))
# self.println(indent, '#flags:\t', int(code.co_flags))
params.append('*%s' % code.co_varnames[argc])
argc += 1

# dump parameter list (with default values)
if isLambda:
self.write("lambda ", ", ".join(params))
else:
if isLambda:
self.write("lambda ")
else:
self.write("(")
pass

last_line = self.f.getvalue().split("\n")[-1]
l = len(last_line)
indent = ' ' * l
line_number = self.line_number

if code_has_star_arg(code):
self.write('*%s' % code.co_varnames[argc + kw_pairs])
argc += 1

i = len(paramnames) - len(defparams)
self.write(", ".join(paramnames[:i]))
suffix = ', ' if i > 0 else ''
for n in node:
if n == 'pos_arg':
self.write(suffix)
self.write(paramnames[i] + '=')
i += 1
self.preorder(n)
if (line_number != self.line_number):
suffix = ",\n" + indent
line_number = self.line_number
else:
suffix = ', '
self.write("(", ", ".join(params))
# self.println(indent, '#flags:\t', int(code.co_flags))

if kw_args > 0:
if not (4 & code.co_flags):
@@ -311,6 +311,13 @@ class SourceWalker(GenericASTTraversal, object):
if node[annotate_last] == 'annotate_tuple':
break

# FIXME: the real situation is that when derived from
# funcdef_annotate the name has been filled in.
# But when derived from funcdefdeco it hasn't. Would like a better
# way to distinguish.
if self.f.getvalue()[-4:] == 'def ':
self.write(code.attr.co_name)

# FIXME: handle and pass full annotate args
make_function3_annotate(self, node, isLambda=False,
codeNode=code, annotate_last=annotate_last)
@@ -358,9 +365,33 @@ class SourceWalker(GenericASTTraversal, object):
self.prec = p
self.prune()
self.n_async_call_function = n_async_call_function

self.n_build_list_unpack = self.n_build_list

if version == 3.5:
def n_call_function(node):
mapping = self._get_mapping(node)
table = mapping[0]
key = node
for i in mapping[1:]:
key = key[i]
pass
if key.type.startswith('CALL_FUNCTION_VAR_KW'):
# Python 3.5 changes the stack position of *args. kwargs come
# after *args whereas in earlier Pythons, *args is at the end
# which simplifies things from our perspective.
# Python 3.6+ replaces CALL_FUNCTION_VAR_KW with CALL_FUNCTION_EX
# We will just swap the order to make it look like earlier Python 3.
entry = table[key.type]
kwarg_pos = entry[2][1]
args_pos = kwarg_pos - 1
# Put last node[args_pos] after subsequent kwargs
while node[kwarg_pos] == 'kwarg' and kwarg_pos < len(node):
# swap node[args_pos] with node[kwargs_pos]
node[kwarg_pos], node[args_pos] = node[args_pos], node[kwarg_pos]
args_pos = kwarg_pos
kwarg_pos += 1
self.default(node)
self.n_call_function = n_call_function

def n_funcdef(node):
if self.version == 3.6:
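The while-loop above bubbles the *args node past the kwarg nodes so the parse tree matches the older argument order. The list manipulation in isolation, with plain strings standing in for the parse-tree children (not the real node objects):

    # Stand-in children: positional args, then *args, then kwargs, roughly
    # as Python 3.5's CALL_FUNCTION_VAR_KW lays them out.
    node = ["pos_arg", "star_args", "kwarg", "kwarg", "CALL_FUNCTION_VAR_KW_2"]

    kwarg_pos = 2                 # first kwarg (entry[2][1] in the real code)
    args_pos = kwarg_pos - 1      # the *args node sits just before it
    while kwarg_pos < len(node) and node[kwarg_pos] == "kwarg":
        node[kwarg_pos], node[args_pos] = node[args_pos], node[kwarg_pos]
        args_pos = kwarg_pos
        kwarg_pos += 1

    print(node)
    # ['pos_arg', 'kwarg', 'kwarg', 'star_args', 'CALL_FUNCTION_VAR_KW_2']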
@@ -525,6 +556,8 @@ class SourceWalker(GenericASTTraversal, object):

if self.pending_newlines:
out = out[:-self.pending_newlines]
if isinstance(out, str) and not PYTHON3:
out = unicode(out, 'utf-8')
self.f.write(out)

def println(self, *data):
@@ -547,26 +580,6 @@ class SourceWalker(GenericASTTraversal, object):
node == AST('return_stmt',
[AST('ret_expr', [NONE]), Token('RETURN_VALUE')]))

def n_continue_stmt(self, node):
if self.version >= 3.0 and node[0] == 'CONTINUE':
t = node[0]
if not t.linestart:
# Artificially-added "continue" statements derived from JUMP_ABSOLUTE
# don't have line numbers associated with them.
# If this CONTINUE is to the same target as a JUMP_ABSOLUTE following it,
# then the "continue" can be suppressed.
op, offset = t.op, t.offset
next_offset = self.scanner.next_offset(op, offset)
scanner = self.scanner
code = scanner.code
if next_offset < len(code):
next_inst = code[next_offset]
if (scanner.opc.opname[next_inst] == 'JUMP_ABSOLUTE'
and t.pattr == code[next_offset+1]):
# Suppress "continue"
self.prune()
self.default(node)

def n_return_stmt(self, node):
if self.params['isLambda']:
self.preorder(node[0])
@@ -1,3 +1,3 @@
# This file is suitable for sourcing inside bash as
# well as importing into Python
VERSION='2.10.1'
VERSION='2.11.4'
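The version file stays a single NAME='value' assignment precisely so the same text can be sourced by bash and imported by Python. A quick sanity check from Python (the module path is an assumption based on the package layout):

    # Assumed module path; this is the version.py the hunk above bumps.
    from uncompyle6.version import VERSION
    print(VERSION)   # expect '2.11.4' for this release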