python-uncompyle6 (mirror of https://github.com/rocky/python-uncompyle6.git)

Compare commits: release-2. ... release-2. (59 commits)
Commits in this compare (abbreviated SHA1s; author and date columns not shown):
a67891c563, 31413be7a1, 98a6f47ad6, 2e3e6658ee, 85c562cb36, 5ab3e52c9c,
004ce5c491, 599ceddd08, 6547d0230f, a65443ee02, 2bd850f297, 90477edf04,
c912d16b50, 8dd405a5ee, 116a22a425, 7d771b9a8c, a1972bbc08, 8a91081535,
0958dc889d, 33a0c75b69, 7ccbd419c6, a45ee15cf2, fb5ad76c4e, d8598f61e4,
5f52cce24d, 70463e036a, 7fba24198f, e06a90ed27, d030a04c1a, 37d5a05241,
5d27832d6f, 6b98432082, 109e813058, 4b8cb11d77, c77e9cdaf8, 4c2f0df3dc,
b49d30266f, 65a16327ce, fff09db66e, 3ef0325cb8, 3a6f9d8f24, d14865c1be,
152935ab26, 5c9c0228ee, ac121076e6, 04ae94ee9e, e8ed17967c, 3f7c4209d9,
f33f425692, 5ffd9b2be7, 87dc5ad80c, 177a422b87, 3a78332d59, 5e801b5d74,
2523b340cd, c3f6fa32db, 6dc9d3ab2f, 74f440bd0b, 1be53ca729

ChangeLog | 265
@@ -1,6 +1,265 @@
+2016-08-20 rocky <rb@dustyfeet.com>
+
+    * uncompyle6/version.py: Get ready for release 2.8.1
+
+2016-08-16 rocky <rb@dustyfeet.com>
+
+    * test/simple_source/bug22/05_test_yield.py,
+      uncompyle6/scanners/scanner2.py: Python 2.2 doesn't have opcode
+      LIST_APPEND
+
+2016-08-16 rocky <rb@dustyfeet.com>
+
+    * uncompyle6/scanners/scanner26.py: Python 2.2 scanner bug: don't
+      mung IMPORT_NAME op
+
+2016-08-16 rocky <rb@dustyfeet.com>
+
+    * test/simple_source/bug_pypy27/02_call_method.py,
+      uncompyle6/parsers/parse2.py, uncompyle6/parsers/parse3.py: Small
+      pypy LOOKUP_METHOD cleanups
+
+2016-08-16 R. Bernstein <rocky@users.noreply.github.com>
+
+    * : Merge pull request #49 from moagstar/master Fixed a bug with
+      FORMAT_VALUE with sub expressions.
+
+2016-08-16 DanielBradburn <moagstar@gmail.com>
+
+    * pytest/test_fstring.py, uncompyle6/parsers/parse3.py: Fixed bug
+      with FORMAT_VALUE where a sub expression would not be correctly
+      interpreted
+
+2016-08-14 rocky <rb@dustyfeet.com>
+
+    * __pkginfo__.py: xdis 2.1.1 removes some bugs encountered here
+
+2016-08-14 rocky <rb@dustyfeet.com>
+
+    * : commit a65443ee0225933367be9c7640629298882532c9 Author: rocky
+      <rb@dustyfeet.com> Date: Sun Aug 14 22:59:34 2016 -0400
+
+2016-08-14 DanielBradburn <moagstar@gmail.com>
+
+    * pytest/test_fstring.py: added examples for known failures
+
+2016-08-14 R. Bernstein <rocky@users.noreply.github.com>
+
+    * : Merge pull request #45 from rocky/revert-43-patch-1 Revert
+      "Cache pip installation in travis"
+
+2016-08-14 R. Bernstein <rocky@users.noreply.github.com>
+
+    * .travis.yml: Revert "Test with latest PyPy in Travis"
+
+2016-08-14 R. Bernstein <rocky@users.noreply.github.com>
+
+    * : Merge pull request #44 from thedrow/patch-2 Test with latest
+      PyPy in Travis
+
+2016-08-14 R. Bernstein <rocky@users.noreply.github.com>
+
+    * : Merge pull request #43 from thedrow/patch-1 Cache pip
+      installation in travis
+
+2016-08-13 rocky <rb@dustyfeet.com>
+
+    * test/Makefile: Back off of 3.6 testing for now
+
+2016-08-13 rocky <rb@dustyfeet.com>
+
+    * : commit 7ccbd419c6b26e8ae9d0929f1bfddedebce6bbaf Author: rocky
+      <rb@dustyfeet.com> Date: Sat Aug 13 20:25:19 2016 -0400
+
+2016-08-13 rocky <rb@dustyfeet.com>
+
+    * test/Makefile: I said - we test 2.2 now.
+
+2016-08-13 rocky <rb@dustyfeet.com>
+
+    * test/Makefile, test/simple_source/stmts/00_import.py,
+      test/test_pythonlib.py: Include Python 2.2 in testing
+
+2016-08-13 rocky <rb@dustyfeet.com>
+
+    * README.rst, uncompyle6/parser.py, uncompyle6/parsers/parse22.py,
+      uncompyle6/scanner.py, uncompyle6/scanners/scanner22.py,
+      uncompyle6/scanners/scanner23.py, uncompyle6/scanners/scanner24.py,
+      uncompyle6/scanners/scanner25.py, uncompyle6/semantics/pysource.py:
+      Start handling Python 2.2 bytecode and... Fix some bugs in Python
+      2.3-2.5 bytecode handling
+
+2016-08-11 Omer Katz <omer.drow@gmail.com>
+
+    * .travis.yml: Test with latest PyPy.
+
+2016-08-11 Omer Katz <omer.drow@gmail.com>
+
+    * .travis.yml: Cache pip installation
+
+2016-08-10 DanielBradburn <moagstar@gmail.com>
+
+    * pytest/test_fstring.py: small formatting change
+
+2016-08-10 DanielBradburn <moagstar@gmail.com>
+
+    * pytest/test_fstring.py, requirements-dev.txt: added hypothesis to
+      requirements-dev
+
+2016-08-10 DanielBradburn <moagstar@gmail.com>
+
+    * pytest/test_fstring.py: added hypothesis test (currently failing
+      due to limited support) for testing fstring uncompyling
+
+2016-08-10 Daniel Bradburn <moagstar@gmail.com>
+
+    * : Merge pull request #2 from rocky/master Merging pypy and cpython
+      3.6 from rocky
+
+2016-08-03 rocky <rb@dustyfeet.com>
+
+    * : commit 109e813058380630bda82014eee94a9089cc4666 Author: rocky
+      <rb@dustyfeet.com> Date: Wed Aug 3 08:07:47 2016 -0400
+
+2016-08-01 rocky <rb@dustyfeet.com>
+
+    * __pkginfo__.py, requirements.txt: Need recent xdis fix for 3.6
+      wordcode
+
+2016-08-01 rocky <rb@dustyfeet.com>
+
+    * test/simple_source/bug36/01_fstring.py: Add Python 3.6
+      formatted_str test
+
+2016-08-01 R. Bernstein <rocky@users.noreply.github.com>
+
+    * : Merge pull request #41 from rocky/3.6 Move forward on moagstar's
+      Python 3.6 support
+
+2016-08-01 rocky <rb@dustyfeet.com>
+
+    * Makefile, README.rst, test/Makefile, test/test_pythonlib.py,
+      uncompyle6/bin/uncompile.py, uncompyle6/parsers/parse3.py,
+      uncompyle6/parsers/parse36.py, uncompyle6/scanner.py,
+      uncompyle6/scanners/scanner36.py, uncompyle6/semantics/aligner.py,
+      uncompyle6/semantics/pysource.py, uncompyle6/verify.py: Moagstar's
+      3.6 wordcode + formattedValue rules
+
+2016-07-30 rocky <rb@dustyfeet.com>
+
+    * uncompyle6/semantics/aligner.py: WIP try to keep line numbers the
+      same
+
+2016-07-29 rocky <rb@dustyfeet.com>
+
+    * test/Makefile, uncompyle6/semantics/fragments.py: Small changes
+
+2016-07-29 rocky <rb@dustyfeet.com>
+
+    * uncompyle6/parsers/parse35.py, uncompyle6/scanner.py,
+      uncompyle6/scanners/scanner3.py, uncompyle6/semantics/pysource.py:
+      Fix 3.5 misclassifying RETURN_VALUE We use location of SETUP_EXCEPT
+      instructions to disambiguate.
+
+2016-07-28 Daniel Bradburn <moagstar@gmail.com>
+
+    * README.rst, test/Makefile, test/bytecode_3.6/fstring.py,
+      test/bytecode_3.6/fstring_single.py, test/test_pythonlib.py,
+      uncompyle6/bin/uncompile.py, uncompyle6/parser.py,
+      uncompyle6/parsers/parse3.py, uncompyle6/scanner.py,
+      uncompyle6/scanners/scanner36.py, uncompyle6/semantics/pysource.py,
+      uncompyle6/verify.py: Starting adding python 3.6 support to
+      uncompyle
+
+2016-07-28 rocky <rb@dustyfeet.com>
+
+    * uncompyle6/parsers/parse2.py, uncompyle6/parsers/parse23.py,
+      uncompyle6/parsers/parse24.py: while1 bug applied to Python 2.3 and
+      2.4
+
+2016-07-28 rocky <rb@dustyfeet.com>
+
+    * uncompyle6/scanners/scanner3.py, uncompyle6/scanners/tok.py: PyPy
+      3.2 bug confusing RETURN_END_IF for except Also fix a instruction
+      formatting bug
+
+2016-07-27 rocky <rb@dustyfeet.com>
+
+    * uncompyle6/parser.py, uncompyle6/parsers/parse3.py,
+      uncompyle6/parsers/parse35.py: Split out 3.5 parser
+
+2016-07-27 rocky <rb@dustyfeet.com>
+
+    * pytest/test_grammar.py, uncompyle6/parser.py,
+      uncompyle6/parsers/parse2.py, uncompyle6/parsers/parse3.py,
+      uncompyle6/parsers/parse34.py: Add python 3.4 grammar checking DRY
+      grammar testing
+
+2016-07-27 rocky <rb@dustyfeet.com>
+
+    * uncompyle6/parsers/parse25.py, uncompyle6/parsers/parse26.py,
+      uncompyle6/parsers/parse27.py, uncompyle6/parsers/parse3.py,
+      uncompyle6/parsers/parse34.py: Clean and check Python 2.6 grammar
+
+2016-07-27 rocky <rb@dustyfeet.com>
+
+    * pytest/test_grammar.py, uncompyle6/parser.py,
+      uncompyle6/parsers/parse2.py, uncompyle6/parsers/parse26.py,
+      uncompyle6/parsers/parse27.py, uncompyle6/parsers/parse3.py: Start
+      to segregate and clean up grammar
+
+2016-07-27 rocky <rb@dustyfeet.com>
+
+    * pytest/test_grammar.py, uncompyle6/disas.py,
+      uncompyle6/scanner.py, uncompyle6/semantics/fragments.py: Add
+      is_pypy parameter to places that need it
+
+2016-07-27 rocky <rb@dustyfeet.com>
+
+    * test/simple_source/stmts/09_whiletrue_bug.py,
+      uncompyle6/parser.py, uncompyle6/parsers/parse2.py,
+      uncompyle6/parsers/parse26.py, uncompyle6/parsers/parse27.py,
+      uncompyle6/scanners/scanner2.py, uncompyle6/scanners/scanner3.py,
+      uncompyle6/semantics/pysource.py: 2.6 and 2.7 while1 grammar rule
+      Fixes issue #40
+
+2016-07-27 rocky <rb@dustyfeet.com>
+
+    * pytest/test_grammar.py, uncompyle6/parser.py,
+      uncompyle6/parsers/parse3.py: Start grammar checker
+
+2016-07-27 rocky <rb@dustyfeet.com>
+
+    * uncompyle6/main.py, uncompyle6/show.py: Show magic number in
+      output Fix bugs due to removal of token.format()
+
+2016-07-27 rocky <rb@dustyfeet.com>
+
+    * uncompyle6/disas.py, uncompyle6/parsers/parse2.py,
+      uncompyle6/parsers/parse3.py, uncompyle6/scanners/scanner2.py,
+      uncompyle6/scanners/scanner27.py, uncompyle6/scanners/scanner3.py,
+      uncompyle6/scanners/scanner35.py, uncompyle6/scanners/tok.py,
+      uncompyle6/show.py: tok.format -> tok.__str__; simplify pypy code
+
+2016-07-27 rocky <rb@dustyfeet.com>
+
+    * uncompyle6/semantics/pysource.py, uncompyle6/verify.py: Python 2.7
+      set comprehension bug
+
+2016-07-27 rocky <rb@dustyfeet.com>
+
+    * uncompyle6/semantics/pysource.py: separate semantic action version
+      differences Added customize_for_version which uses is_pypy and
+      version to adjust tables
+
+2016-07-27 rocky <rb@dustyfeet.com>
+
+    * uncompyle6/semantics/fragments.py,
+      uncompyle6/semantics/pysource.py: Customize tables better for
+      specific Python versions
+
+2016-07-27 rocky <rb@dustyfeet.com>
+
+    * uncompyle6/parsers/parse2.py, uncompyle6/semantics/pysource.py:
+      Small code clean up
+
 2016-07-26 rocky <rb@dustyfeet.com>
 
-    * README.rst, __pkginfo__.py, requirements.txt,
+    * uncompyle6/scanners/tok.py, uncompyle6/semantics/fragments.py,
+      uncompyle6/verify.py: Usuability fixes * try using format for
+      __str__ * Explicitly nuke self.attr and self.pattr when no arg
+      * Sync pysource and format wrt make_function
+
+2016-07-26 rocky <rb@dustyfeet.com>
+
+    * ChangeLog, NEWS, README.rst, __pkginfo__.py, requirements.txt,
       test/test_pyenvlib.py, uncompyle6/version.py: Get ready for release
       2.7.1

@@ -105,9 +364,9 @@
       uncompyle6/scanners/scanner3.py, uncompyle6/semantics/pysource.py:
       PyPy support * Use proper PYPY 32 opcodes * handle opcodes
       LOOKUP_METHOD and CALL_METHOD * Administrative stuff for PyPy

-2016-07-24 rocky <rb@dustyfeet.com>
+2016-07-24 Daniel Bradburn <moagstar@gmail.com>

-    * test/add-test.py: add-test: Make sure PyPy bytecode is separated
+    * : Merge pull request #1 from rocky/master Syncing with rocky

 2016-07-24 rocky <rb@dustyfeet.com>

Makefile | 2
@@ -33,7 +33,7 @@ check-2.7 check-3.3 check-3.4: pytest
 
 #: Tests for Python 3.2 and 3.5 - pytest doesn't work here
 # Or rather 3.5 doesn't work not on Travis
-check-3.2 check-3.5:
+check-3.2 check-3.5 check-3.6:
	$(MAKE) -C test $@
 
 #:Tests for Python 2.6 (doesn't have pytest)

NEWS | 19
@@ -1,3 +1,22 @@
+uncompyle6 2.8.1 2016-08-20
+
+- Add Python 2.2 decompilation
+
+- Fix bugs
+  * PyPy LOOKUP_METHOD bug
+  * Python 3.6 FORMAT_VALUE handles expressions now
+
+uncompyle6 2.8.0 2016-08-03
+
+- Start Python 3.6 support (moagstar)
+  more work on PEP 498 needed
+- tidy bytecode/word output
+- numerous decompiling bugs fixed
+- grammar testing started
+- show magic number in deparsed output
+- better grammar and semantic action segregation based
+  on python bytecode version
+
 uncompyle6 2.7.1 2016-07-26
 
 - PyPy bytecodes for 2.7 and 3.2 added
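
The FORMAT_VALUE fix called out above is easiest to see as a round trip. The
sketch below is illustrative only: it assumes Python 3.6 or later and uses the
deparse_code call that appears in pytest/test_fstring.py later in this compare.

    # Illustrative sketch (assumes Python 3.6+): an f-string whose replacement
    # field is a sub-expression rather than a bare name compiles to the
    # FORMAT_VALUE opcode, which is the case the fix above addresses.
    from uncompyle6 import PYTHON_VERSION, deparse_code

    source = "f'{len(items) + 1:>10}'\n"          # sub-expression in the f-string
    code = compile(source, '<string>', 'single')  # bytecode containing FORMAT_VALUE
    deparsed = deparse_code(PYTHON_VERSION, code, compile_mode='single')
    print(deparsed.text)                          # should round-trip to the f-string
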
README.rst
@@ -11,7 +11,7 @@ Introduction
 ------------
 
 *uncompyle6* translates Python bytecode back into equivalent Python
-source code. It accepts bytecodes from Python version 2.3 to 3.5 or
+source code. It accepts bytecodes from Python version 2.2 to 3.6 or
 so, including PyPy bytecode.
 
 Why this?
@@ -45,7 +45,7 @@ Requirements
 
 This project requires Python 2.6 or later, PyPy 3-2.4, or PyPy-5.0.1.
 The bytecode files it can read has been tested on Python bytecodes from
-versions 2.3-2.7, and 3.2-3.5 and the above-mentioned PyPy versions.
+versions 2.2-2.7, and 3.2-3.6 and the above-mentioned PyPy versions.
 
 Installation
 ------------
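
For orientation, a minimal sketch of the library-level use the README describes:
it assumes uncompyle_file is importable from uncompyle6.main (its signature
appears in the uncompyle6/main.py hunk further down this page) and uses a
placeholder .pyc path.

    # Minimal usage sketch; 'mymodule.pyc' is a placeholder path and the import
    # location is assumed from the uncompyle6/main.py hunk later in this compare.
    import sys
    from uncompyle6.main import uncompyle_file

    uncompyle_file('mymodule.pyc', sys.stdout)   # writes decompiled source to stdout
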
__pkginfo__.py
@@ -37,7 +37,7 @@ entry_points={
 ]}
 ftp_url = None
 install_requires = ['spark-parser >= 1.4.0',
-                    'xdis >= 2.0.3']
+                    'xdis >= 2.1.1']
 license = 'MIT'
 mailing_list = 'python-debugger@googlegroups.com'
 modname = 'uncompyle6'

pytest/test_fstring.py | 136 (new file)
@@ -0,0 +1,136 @@
# std
import os
# test
import pytest
import hypothesis
from hypothesis import strategies as st
# uncompyle6
from uncompyle6 import PYTHON_VERSION, deparse_code


@st.composite
def expressions(draw):
    # todo : would be nice to generate expressions using hypothesis however
    # this is pretty involved so for now just use a corpus of expressions
    # from which to select.
    return draw(st.sampled_from((
        'abc',
        'len(items)',
        'x + 1',
        'lineno',
        'container',
        'self.attribute',
        'self.method()',
        'sorted(items, key=lambda x: x.name)',
        'func(*args, **kwargs)',
        'text or default',
    )))


@st.composite
def format_specifiers(draw):
    """
    Generate a valid format specifier using the rules:

    format_spec ::= [[fill]align][sign][#][0][width][,][.precision][type]
    fill        ::= <any character>
    align       ::= "<" | ">" | "=" | "^"
    sign        ::= "+" | "-" | " "
    width       ::= integer
    precision   ::= integer
    type        ::= "b" | "c" | "d" | "e" | "E" | "f" | "F" | "g" | "G" | "n" | "o" | "s" | "x" | "X" | "%"

    See https://docs.python.org/2/library/string.html

    :param draw: Let hypothesis draw from other strategies.

    :return: An example format_specifier.
    """
    alphabet_strategy = st.characters(min_codepoint=ord('a'), max_codepoint=ord('z'))
    fill = draw(st.one_of(alphabet_strategy, st.none()))
    align = draw(st.sampled_from(list('<>=^')))
    fill_align = (fill + align or '') if fill else ''

    type_ = draw(st.sampled_from('bcdeEfFgGnosxX%'))
    can_have_sign = type_ in 'deEfFgGnoxX%'
    can_have_comma = type_ in 'deEfFgG%'
    can_have_precision = type_ in 'fFgG'
    can_have_pound = type_ in 'boxX%'
    can_have_zero = type_ in 'oxX'

    sign = draw(st.sampled_from(list('+- ') + [''])) if can_have_sign else ''
    pound = draw(st.sampled_from(('#', '',))) if can_have_pound else ''
    zero = draw(st.sampled_from(('0', '',))) if can_have_zero else ''

    int_strategy = st.integers(min_value=1, max_value=1000)

    width = draw(st.one_of(int_strategy, st.none()))
    width = str(width) if width is not None else ''

    comma = draw(st.sampled_from((',', '',))) if can_have_comma else ''
    if can_have_precision:
        precision = draw(st.one_of(int_strategy, st.none()))
        precision = '.' + str(precision) if precision else ''
    else:
        precision = ''

    return ''.join((fill_align, sign, pound, zero, width, comma, precision, type_,))


@st.composite
def fstrings(draw):
    """
    Generate a valid f-string.
    See https://www.python.org/dev/peps/pep-0498/#specification

    :param draw: Let hypothesis draw from other strategies.

    :return: A valid f-string.
    """
    is_raw = draw(st.booleans())
    integer_strategy = st.integers(min_value=0, max_value=3)
    expression_count = draw(integer_strategy)
    content = []
    for _ in range(expression_count):
        expression = draw(expressions())
        # not yet : conversion not supported
        conversion = ''  # draw(st.sampled_from(('', '!s', '!r', '!a',)))
        has_specifier = draw(st.booleans())
        specifier = ':' + draw(format_specifiers()) if has_specifier else ''
        content.append('{{{}{}{}}}'.format(expression, conversion, specifier))
    content = ''.join(content)

    return "f{}'{}'".format('r' if is_raw else '', content)


@pytest.mark.skipif(PYTHON_VERSION < 3.6, reason='need at least python 3.6')
@hypothesis.given(format_specifiers())
def test_format_specifiers(format_specifier):
    """Verify that format_specifiers generates valid specifiers"""
    try:
        exec('"{:' + format_specifier + '}".format(0)')
    except ValueError as e:
        if 'Unknown format code' not in str(e):
            raise


@pytest.mark.skipif(PYTHON_VERSION < 3.6, reason='need at least python 3.6')
@hypothesis.given(fstrings())
def test_uncompyle_fstring(fstring):
    """Verify uncompyling fstring bytecode"""

    # ignore an f-string with no expressions; an f-string with
    # no expressions just gets compiled to a normal string.
    hypothesis.assume('{' in fstring)

    # BUG : At the moment a single expression is not supported,
    # for example f'{abc}'.
    hypothesis.assume(fstring.count('{') > 1)

    expr = fstring + '\n'
    code = compile(expr, '<string>', 'single')
    deparsed = deparse_code(PYTHON_VERSION, code, compile_mode='single')
    recompiled = compile(deparsed.text, '<string>', 'single')

    if recompiled != code:
        assert deparsed.text == expr

pytest/test_grammar.py | 44 (new file)
@@ -0,0 +1,44 @@
import pytest, re
from uncompyle6 import PYTHON_VERSION, PYTHON3, IS_PYPY  # , PYTHON_VERSION
from uncompyle6.parser import get_python_parser
from uncompyle6.scanner import get_scanner

def test_grammar():

    def check_tokens(tokens, opcode_set):
        remain_tokens = set(tokens) - opcode_set
        remain_tokens = set([re.sub('_\d+$', '', t) for t in remain_tokens])
        remain_tokens = set([re.sub('_CONT$', '', t) for t in remain_tokens])
        remain_tokens = set(remain_tokens) - opcode_set
        assert remain_tokens == set([]), \
            "Remaining tokens %s\n====\n%s" % (remain_tokens, p.dumpGrammar())

    p = get_python_parser(PYTHON_VERSION, is_pypy=IS_PYPY)
    lhs, rhs, tokens, right_recursive = p.checkSets()
    expect_lhs = set(['expr1024', 'pos_arg'])
    unused_rhs = set(['build_list', 'call_function', 'mkfunc', 'mklambda',
                      'unpack', 'unpack_list'])
    expect_right_recursive = [['designList', ('designator', 'DUP_TOP', 'designList')]]
    if PYTHON3:
        expect_lhs.add('load_genexpr')
        unused_rhs = unused_rhs.union(set("""
        except_pop_except genexpr classdefdeco2 listcomp
        """.split()))
    else:
        expect_lhs.add('kwarg')
    assert expect_lhs == set(lhs)
    assert unused_rhs == set(rhs)
    assert expect_right_recursive == right_recursive

    s = get_scanner(PYTHON_VERSION, IS_PYPY)
    ignore_set = set(
        """JUMP_BACK CONTINUE RETURN_END_IF COME_FROM
           LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP
           LAMBDA_MARKER RETURN_LAST
        """.split())
    if 2.6 <= PYTHON_VERSION <= 2.7:
        opcode_set = set(s.opc.opname).union(ignore_set)
        check_tokens(tokens, opcode_set)
    elif PYTHON_VERSION == 3.4:
        ignore_set.add('LOAD_CLASSNAME')
        opcode_set = set(s.opc.opname).union(ignore_set)
        check_tokens(tokens, opcode_set)

requirements-dev.txt
@@ -1,2 +1,3 @@
 pytest
 flake8
+hypothesis

requirements.txt
@@ -1,2 +1,2 @@
 spark-parser >= 1.2.1
-xdis >= 2.0.3
+xdis >= 2.1.0

test/Makefile
@@ -38,13 +38,18 @@ check-3.4: check-bytecode check-3.4-ok check-2.7-ok
 check-3.5: check-bytecode
	$(PYTHON) test_pythonlib.py --bytecode-3.5 --verify $(COMPILE)
 
+#: Run working tests from Python 3.6
+check-3.6: check-bytecode
+	$(PYTHON) test_pythonlib.py --bytecode-3.6 --verify $(COMPILE)
+
 #: Check deparsing only, but from a different Python version
 check-disasm:
	$(PYTHON) dis-compare.py
 
 #: Check deparsing bytecode 2.x only
 check-bytecode-2:
-	$(PYTHON) test_pythonlib.py --bytecode-2.3 --bytecode-2.4 \
+	$(PYTHON) test_pythonlib.py \
+	--bytecode-2.2 --bytecode-2.3 --bytecode-2.4 \
	--bytecode-2.5 --bytecode-2.6 --bytecode-2.7 --bytecode-pypy2.7
 
 #: Check deparsing bytecode 3.x only
@@ -54,9 +59,14 @@ check-bytecode-3:
 
 #: Check deparsing bytecode that works running Python 2 and Python 3
 check-bytecode: check-bytecode-3
-	$(PYTHON) test_pythonlib.py --bytecode-2.3 --bytecode-2.4 \
+	$(PYTHON) test_pythonlib.py \
+	--bytecode-2.2 --bytecode-2.3 --bytecode-2.4 \
	--bytecode-2.5 --bytecode-2.6 --bytecode-2.7 --bytecode-pypy2.7
 
+#: Check deparsing Python 2.2
+check-bytecode-2.3:
+	$(PYTHON) test_pythonlib.py --bytecode-2.2
+
 #: Check deparsing Python 2.3
 check-bytecode-2.3:
	$(PYTHON) test_pythonlib.py --bytecode-2.3
@@ -93,6 +103,10 @@ check-bytecode-3.4:
 check-bytecode-3.5:
	$(PYTHON) test_pythonlib.py --bytecode-3.5
 
+#: Check deparsing Python 3.6
+check-bytecode-3.6:
+	$(PYTHON) test_pythonlib.py --bytecode-3.6
+
 #: short tests for bytecodes only for this version of Python
 check-native-short:
	$(PYTHON) test_pythonlib.py --bytecode-$(PYTHON_VERSION) --verify $(COMPILE)
@@ -118,7 +132,7 @@ check-3.4-ok:
 2.6:
 
 #: PyPy 5.0.x with Python 2.7 ...
-pypy-2.7 5.0:
+pypy-2.7 5.0 5.3:
	$(PYTHON) test_pythonlib.py --bytecode-pypy2.7 --verify
 
 #: PyPy 2.4.x with Python 3.2 ...

New binary files added (contents not shown):
    test/bytecode_2.2/00_assign.pyc
    test/bytecode_2.2/00_import.pyc
    test/bytecode_2.2/00_pass.pyc
    test/bytecode_2.2/02_apply_equiv.pyc
    test/bytecode_2.2/03_class_method.pyc
    test/bytecode_2.2/03_if_elif.pyc
    test/bytecode_2.2/05_test_yield.pyc
    test/bytecode_2.7/09_whiletrue_bug.pyc
    test/bytecode_3.6/01_fstring.pyc
Several other binary files were also changed (not shown).

test/bytecode_3.6/fstring.py | 5 (new file)
@@ -0,0 +1,5 @@
def fn(var1, var2):
    return f'interpolate {var1} strings {var2} py36'


fn('a', 'b')

test/bytecode_3.6/fstring_single.py | 2 (new file)
@@ -0,0 +1,2 @@
def fn(var):
    return f'interpolate {var} strings'

New binary file added (contents not shown):
    test/bytecode_pypy2.7/02_call_method.pyc

test/simple_source/bug22/02_apply_equiv.py | 27 (new file)
@@ -0,0 +1,27 @@
# decompyle's test_appyEquiv.py

def kwfunc(**kwargs):
    print kwargs.items()


def argsfunc(*args):
    print args


def no_apply(*args, **kwargs):
    print args
    print kwargs.items()
    argsfunc(34)
    foo = argsfunc(*args)
    argsfunc(*args)
    argsfunc(34, *args)
    kwfunc(**None)
    kwfunc(x = 11, **None)
    no_apply(*args, **args)
    no_apply(34, *args, **args)
    no_apply(x = 11, *args, **args)
    no_apply(34, x = 11, *args, **args)
    no_apply(42, 34, x = 11, *args, **args)
    return foo

no_apply(1, 2, 4, 8, a = 2, b = 3, c = 5)

test/simple_source/bug22/05_test_yield.py | 24 (new file)
@@ -0,0 +1,24 @@
# From decompyle
# In Python 2.2 we don't have op LIST_APPEND while in > 2.3 we do.

from __future__ import generators

def inorder(t):
    if t:
        for x in inorder(t.left):
            yield x

        yield t.label
        for x in inorder(t.right):
            yield x

def generate_ints(n):
    for i in range(n):
        yield i * 2

for i in generate_ints(5):
    print i,

print
gen = generate_ints(3)
print gen.next(), gen.next(), gen.next(), gen.next()

test/simple_source/bug36/01_fstring.py | 3 (new file)
@@ -0,0 +1,3 @@
var1 = 'x'
var2 = 'y'
print(f'interpolate {var1} strings {var2} py36')

test/simple_source/bug_pypy27/02_call_method.py | 4 (new file)
@@ -0,0 +1,4 @@
# Bug in PyPy was not handling CALL_METHOD_xxx like
# CALL_FUNCTION_XXX
def truncate(self, size=None):
    self.db.put(self.key, '', txn=self.txn, dlen=self.len - size, doff=size)

test/simple_source/stmts/00_import.py
@@ -3,3 +3,4 @@
 import sys
 from os import path
 from os import *
+import time as time1, os as os1

test/simple_source/stmts/09_whiletrue_bug.py
@@ -1,9 +1,12 @@
-if args == ['-']:
+if __file__ == ['-']:
     while True:
         try:
-            compile(filename, doraise=True)
+            compile(__file__, doraise=True)
         except RuntimeError:
             rv = 1
         else:
             rv = 1
     print(rv)
+
+
+while 1:pass

test/test_pythonlib.py
@@ -72,13 +72,14 @@ test_options = {
                    PYOC, 'base_2.7', 2.7),
 }
 
-for vers in (2.7, 3.4, 3.5):
+for vers in (2.7, 3.4, 3.5, 3.6):
     pythonlib = "ok_lib%s" % vers
     key = "ok-%s" % vers
     test_options[key] = (os.path.join(src_dir, pythonlib), PYOC, key, vers)
     pass
 
-for vers in (2.3, 2.4, 2.5, 2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 'pypy3.2', 'pypy2.7'):
+for vers in (2.2, 2.3, 2.4, 2.5, 2.6, 2.7,
+             3.2, 3.3, 3.4, 3.5, 3.6, 'pypy3.2', 'pypy2.7'):
     bytecode = "bytecode_%s" % vers
     key = "bytecode-%s" % vers
     test_options[key] = (bytecode, PYC, bytecode, vers)

uncompyle6/bin/uncompile.py
@@ -64,8 +64,8 @@ def usage():
 
 
 def main_bin():
-    if not (sys.version_info[0:2] in ((2, 6), (2, 7), (3, 2), (3, 3), (3, 4), (3, 5))):
-        print('Error: %s requires Python 2.6, 2.7, 3.2, 3.3, 3.4 or 3.5' % program,
+    if not (sys.version_info[0:2] in ((2, 6), (2, 7), (3, 2), (3, 3), (3, 4), (3, 5), (3, 6))):
+        print('Error: %s requires Python 2.6, 2.7, 3.2, 3.3, 3.4, 3.5, or 3.6' % program,
              file=sys.stderr)
         sys.exit(-1)
 

uncompyle6/disas.py
@@ -28,7 +28,7 @@ from xdis.code import iscode
 from xdis.load import check_object_path, load_module
 from uncompyle6.scanner import get_scanner
 
-def disco(version, co, out=None):
+def disco(version, co, out=None, is_pypy=False):
     """
     diassembles and deparses a given code block 'co'
     """
@@ -42,7 +42,7 @@ def disco(version, co, out=None):
     print('# Embedded file name: %s' % co.co_filename,
           file=real_out)
 
-    scanner = get_scanner(version)
+    scanner = get_scanner(version, is_pypy=is_pypy)
 
     queue = deque([co])
     disco_loop(scanner.disassemble, queue, real_out)
@@ -61,7 +61,7 @@ def disco_loop(disasm, queue, real_out):
             queue.append(t.pattr)
         elif iscode(t.attr):
             queue.append(t.attr)
-        print(t.format(), file=real_out)
+        print(t, file=real_out)
         pass
     pass
 
@@ -82,7 +82,7 @@ def disassemble_file(filename, outstream=None, native=False):
         for con in co:
             disco(version, con, outstream)
     else:
-        disco(version, co, outstream)
+        disco(version, co, outstream, is_pypy=is_pypy)
     co = None
 
 def _test():
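
A short sketch of driving the disassembly entry point shown above; it assumes
disassemble_file is exported by uncompyle6.disas (the signature is taken from
the @@ -82 hunk) and uses a placeholder .pyc path.

    # Sketch only: call the disassembler whose disco()/disassemble_file()
    # signatures appear above; 'mymodule.pyc' is a placeholder path.
    import sys
    from uncompyle6.disas import disassemble_file

    disassemble_file('mymodule.pyc', sys.stdout)   # tokens printed via disco_loop()
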
uncompyle6/main.py
@@ -12,7 +12,7 @@ from xdis.load import load_module
 def uncompyle(
         version, co, out=None, showasm=False, showast=False,
         timestamp=None, showgrammar=False, code_objects={},
-        is_pypy=False):
+        is_pypy=False, magic_int=None):
     """
     disassembles and deparses a given code block 'co'
     """
@@ -22,8 +22,10 @@ def uncompyle(
     real_out = out or sys.stdout
     co_pypy_str = 'PyPy ' if is_pypy else ''
     run_pypy_str = 'PyPy ' if IS_PYPY else ''
-    print('# %sPython bytecode %s (disassembled from %sPython %s)\n' %
-          (co_pypy_str, version, run_pypy_str, PYTHON_VERSION),
+    print('# %sPython bytecode %s%s disassembled from %sPython %s' %
+          (co_pypy_str, version,
+           " (%d)" % magic_int if magic_int else "",
+           run_pypy_str, PYTHON_VERSION),
          file=real_out)
     if co.co_filename:
         print('# Embedded file name: %s' % co.co_filename,
@@ -60,11 +62,11 @@ def uncompyle_file(filename, outstream=None, showasm=False, showast=False,
        for con in co:
            uncompyle(version, con, outstream, showasm, showast,
                      timestamp, showgrammar, code_objects=code_objects,
-                     is_pypy=is_pypy)
+                     is_pypy=is_pypy, magic_int=magic_int)
    else:
        uncompyle(version, co, outstream, showasm, showast,
                  timestamp, showgrammar, code_objects=code_objects,
-                 is_pypy=is_pypy)
+                 is_pypy=is_pypy, magic_int=magic_int)
    co = None
 
 # FIXME: combine into an options parameter
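
The change above only affects the banner line that uncompyle() writes. The
stand-alone snippet below just evaluates that new format expression so the
effect of magic_int is visible; the values 3.6 and 3379 are made-up examples.

    # Stand-alone illustration of the new header format string from the hunk
    # above; all values here are example placeholders.
    co_pypy_str, run_pypy_str = '', ''
    version, PYTHON_VERSION, magic_int = 3.6, 3.6, 3379
    print('# %sPython bytecode %s%s disassembled from %sPython %s' %
          (co_pypy_str, version,
           " (%d)" % magic_int if magic_int else "",
           run_pypy_str, PYTHON_VERSION))
    # prints: # Python bytecode 3.6 (3379) disassembled from Python 3.6
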
uncompyle6/parser.py
@@ -72,7 +72,7 @@ class PythonParser(GenericASTBuilder):
         print("Instruction context:")
         for i in range(start, finish):
             indent = '   ' if i != index else '-> '
-            print("%s%s" % (indent, instructions[i].format()))
+            print("%s%s" % (indent, instructions[i]))
         raise ParserError(err_token, err_token.offset)
 
     def typestring(self, token):
@@ -236,17 +236,12 @@ class PythonParser(GenericASTBuilder):
         stmt ::= augassign2
         augassign1 ::= expr expr inplace_op designator
         augassign1 ::= expr expr inplace_op ROT_THREE STORE_SUBSCR
-        augassign1 ::= expr expr inplace_op ROT_TWO STORE_SLICE+0
-        augassign1 ::= expr expr inplace_op ROT_THREE STORE_SLICE+1
-        augassign1 ::= expr expr inplace_op ROT_THREE STORE_SLICE+2
-        augassign1 ::= expr expr inplace_op ROT_FOUR STORE_SLICE+3
         augassign2 ::= expr DUP_TOP LOAD_ATTR expr
                        inplace_op ROT_TWO STORE_ATTR
 
         inplace_op ::= INPLACE_ADD
         inplace_op ::= INPLACE_SUBTRACT
         inplace_op ::= INPLACE_MULTIPLY
-        inplace_op ::= INPLACE_DIVIDE
         inplace_op ::= INPLACE_TRUE_DIVIDE
         inplace_op ::= INPLACE_FLOOR_DIVIDE
         inplace_op ::= INPLACE_MODULO
@@ -273,7 +268,6 @@ class PythonParser(GenericASTBuilder):
     def p_forstmt(self, args):
         """
         _for ::= GET_ITER FOR_ITER
-        _for ::= LOAD_CONST FOR_LOOP
 
         for_block ::= l_stmts_opt _come_from JUMP_BACK
         for_block ::= return_stmts _come_from
@@ -339,8 +333,6 @@ class PythonParser(GenericASTBuilder):
         imports_cont ::= imports_cont import_cont
         imports_cont ::= import_cont
         import_cont ::= LOAD_CONST LOAD_CONST import_as_cont
-        import_as_cont ::= IMPORT_NAME_CONT designator
-        import_as_cont ::= IMPORT_NAME_CONT load_attrs designator
         import_as_cont ::= IMPORT_FROM designator
 
         load_attrs ::= LOAD_ATTR
@@ -371,9 +363,6 @@ class PythonParser(GenericASTBuilder):
 
         stmt ::= setcomp_func
 
-        setcomp_func ::= BUILD_SET_0 LOAD_FAST FOR_ITER designator comp_iter
-        JUMP_BACK RETURN_VALUE RETURN_LAST
-
         comp_iter ::= comp_if
         comp_iter ::= comp_ifnot
         comp_iter ::= comp_for
@@ -381,9 +370,7 @@ class PythonParser(GenericASTBuilder):
         comp_body ::= set_comp_body
         comp_body ::= gen_comp_body
         comp_body ::= dict_comp_body
-        set_comp_body ::= expr SET_ADD
         gen_comp_body ::= expr YIELD_VALUE POP_TOP
-        dict_comp_body ::= expr expr MAP_ADD
 
         comp_if ::= expr jmp_false comp_iter
         comp_ifnot ::= expr jmp_true comp_iter
@@ -394,7 +381,6 @@ class PythonParser(GenericASTBuilder):
     def p_expr(self, args):
         '''
         expr ::= _mklambda
-        expr ::= SET_LINENO
         expr ::= LOAD_FAST
         expr ::= LOAD_NAME
         expr ::= LOAD_CONST
@@ -411,19 +397,17 @@ class PythonParser(GenericASTBuilder):
         expr ::= unary_expr
         expr ::= call_function
         expr ::= unary_not
-        expr ::= unary_convert
         expr ::= binary_subscr
         expr ::= binary_subscr2
         expr ::= load_attr
         expr ::= get_iter
-        expr ::= slice0
-        expr ::= slice1
-        expr ::= slice2
-        expr ::= slice3
         expr ::= buildslice2
         expr ::= buildslice3
         expr ::= yield
 
+        # Possibly Python < 2.3
+        # expr ::= SET_LINENO
+
         binary_expr ::= expr expr binary_op
         binary_op ::= BINARY_ADD
         binary_op ::= BINARY_MULTIPLY
@@ -431,7 +415,6 @@ class PythonParser(GenericASTBuilder):
         binary_op ::= BINARY_OR
         binary_op ::= BINARY_XOR
         binary_op ::= BINARY_SUBTRACT
-        binary_op ::= BINARY_DIVIDE
         binary_op ::= BINARY_TRUE_DIVIDE
         binary_op ::= BINARY_FLOOR_DIVIDE
         binary_op ::= BINARY_MODULO
@@ -445,21 +428,11 @@ class PythonParser(GenericASTBuilder):
         unary_op ::= UNARY_INVERT
 
         unary_not ::= expr UNARY_NOT
-        unary_convert ::= expr UNARY_CONVERT
 
         binary_subscr ::= expr expr BINARY_SUBSCR
-        binary_subscr2 ::= expr expr DUP_TOPX_2 BINARY_SUBSCR
 
         load_attr ::= expr LOAD_ATTR
         get_iter ::= expr GET_ITER
-        slice0 ::= expr SLICE+0
-        slice0 ::= expr DUP_TOP SLICE+0
-        slice1 ::= expr expr SLICE+1
-        slice1 ::= expr expr DUP_TOPX_2 SLICE+1
-        slice2 ::= expr expr SLICE+2
-        slice2 ::= expr expr DUP_TOPX_2 SLICE+2
-        slice3 ::= expr expr expr SLICE+3
-        slice3 ::= expr expr expr DUP_TOPX_3 SLICE+3
         buildslice3 ::= expr expr expr BUILD_SLICE_3
         buildslice2 ::= expr expr BUILD_SLICE_2
 
@@ -468,12 +441,6 @@ class PythonParser(GenericASTBuilder):
         _mklambda ::= load_closure mklambda
         _mklambda ::= mklambda
 
-        # Note: Python < 2.7 doesn't have *POP* or this. Remove from here?
-        # FIXME: segregate 2.7+
-
-        or ::= expr JUMP_IF_TRUE_OR_POP expr COME_FROM
-        and ::= expr JUMP_IF_FALSE_OR_POP expr COME_FROM
-
         or ::= expr jmp_true expr come_from_opt
         and ::= expr jmp_false expr come_from_opt
         and2 ::= _jump jmp_false COME_FROM expr COME_FROM
@@ -492,14 +459,6 @@ class PythonParser(GenericASTBuilder):
         ret_expr_or_cond ::= ret_cond
         ret_expr_or_cond ::= ret_cond_not
 
-        # Note: Python < 2.7 doesn't have *POP* or this. Remove from here?
-        # FIXME: segregate 2.7+
-
-        ret_and ::= expr JUMP_IF_FALSE_OR_POP ret_expr_or_cond COME_FROM
-        ret_or ::= expr JUMP_IF_TRUE_OR_POP ret_expr_or_cond COME_FROM
-        ret_cond ::= expr POP_JUMP_IF_FALSE expr RETURN_END_IF ret_expr_or_cond
-        ret_cond_not ::= expr POP_JUMP_IF_TRUE expr RETURN_END_IF ret_expr_or_cond
-
         stmt ::= return_lambda
         stmt ::= conditional_lambda
 
@@ -511,15 +470,9 @@ class PythonParser(GenericASTBuilder):
         compare ::= expr expr COMPARE_OP
         cmp_list ::= expr cmp_list1 ROT_TWO POP_TOP
                      _come_from
-        cmp_list1 ::= expr DUP_TOP ROT_THREE
-                      COMPARE_OP JUMP_IF_FALSE_OR_POP
-                      cmp_list1 COME_FROM
         cmp_list1 ::= expr DUP_TOP ROT_THREE
                       COMPARE_OP jmp_false
                       cmp_list1 _come_from
-        cmp_list1 ::= expr DUP_TOP ROT_THREE
-                      COMPARE_OP JUMP_IF_FALSE_OR_POP
-                      cmp_list2 COME_FROM
         cmp_list1 ::= expr DUP_TOP ROT_THREE
                       COMPARE_OP jmp_false
                       cmp_list2 _come_from
@@ -564,10 +517,6 @@ class PythonParser(GenericASTBuilder):
         designator ::= STORE_GLOBAL
         designator ::= STORE_DEREF
         designator ::= expr STORE_ATTR
-        designator ::= expr STORE_SLICE+0
-        designator ::= expr expr STORE_SLICE+1
-        designator ::= expr expr STORE_SLICE+2
-        designator ::= expr expr expr STORE_SLICE+3
         designator ::= store_subscr
         store_subscr ::= expr expr STORE_SUBSCR
         designator ::= unpack
@@ -583,7 +532,7 @@ def parse(p, tokens, customize):
 
 
 def get_python_parser(
-        version, debug_parser, compile_mode='exec',
+        version, debug_parser={}, compile_mode='exec',
         is_pypy = False):
     """Returns parser object for Python version 2 or 3, 3.2, 3.5on,
     etc., depending on the parameters passed.  *compile_mode* is either
@@ -594,7 +543,13 @@ def get_python_parser(
 
     # FIXME: there has to be a better way...
     if version < 3.0:
-        if version == 2.3:
+        if version == 2.2:
+            import uncompyle6.parsers.parse22 as parse22
+            if compile_mode == 'exec':
+                p = parse22.Python22Parser(debug_parser)
+            else:
+                p = parse22.Python22ParserSingle(debug_parser)
+        elif version == 2.3:
             import uncompyle6.parsers.parse23 as parse23
             if compile_mode == 'exec':
                 p = parse23.Python23Parser(debug_parser)
@@ -651,11 +606,12 @@ def get_python_parser(
             p = parse34.Python34Parser(debug_parser)
         else:
             p = parse34.Python34ParserSingle(debug_parser)
-    elif version >= 3.5:
+    elif version == 3.5:
+        import uncompyle6.parsers.parse35 as parse35
         if compile_mode == 'exec':
-            p = parse3.Python35onParser(debug_parser)
+            p = parse35.Python35Parser(debug_parser)
         else:
-            p = parse3.Python35onParserSingle(debug_parser)
+            p = parse35.Python35ParserSingle(debug_parser)
     else:
        if compile_mode == 'exec':
            p = parse3.Python3Parser(debug_parser)
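
A minimal sketch of exercising the new version dispatch above; it assumes the
parse22 module introduced in this compare is installed and importable.

    # Sketch: request a Python 2.2 parser through the dispatch shown above.
    from uncompyle6.parser import get_python_parser

    p = get_python_parser(2.2, compile_mode='exec')
    print(type(p).__name__)   # expected: Python22Parser
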
@@ -40,6 +40,15 @@ class Python2Parser(PythonParser):
|
|||||||
print_nl ::= PRINT_NEWLINE
|
print_nl ::= PRINT_NEWLINE
|
||||||
'''
|
'''
|
||||||
|
|
||||||
|
def p_stmt2(self, args):
|
||||||
|
"""
|
||||||
|
while1stmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK POP_BLOCK COME_FROM
|
||||||
|
|
||||||
|
exec_stmt ::= expr exprlist DUP_TOP EXEC_STMT
|
||||||
|
exec_stmt ::= expr exprlist EXEC_STMT
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
def p_print_to(self, args):
|
def p_print_to(self, args):
|
||||||
'''
|
'''
|
||||||
stmt ::= print_to
|
stmt ::= print_to
|
||||||
@@ -84,8 +93,6 @@ class Python2Parser(PythonParser):
|
|||||||
raise_stmt3 ::= expr expr expr RAISE_VARARGS_3
|
raise_stmt3 ::= expr expr expr RAISE_VARARGS_3
|
||||||
|
|
||||||
stmt ::= exec_stmt
|
stmt ::= exec_stmt
|
||||||
exec_stmt ::= expr exprlist DUP_TOP EXEC_STMT
|
|
||||||
exec_stmt ::= expr exprlist EXEC_STMT
|
|
||||||
|
|
||||||
stmt ::= assert
|
stmt ::= assert
|
||||||
stmt ::= assert2
|
stmt ::= assert2
|
||||||
@@ -202,18 +209,24 @@ class Python2Parser(PythonParser):
|
|||||||
genexpr ::= LOAD_GENEXPR MAKE_FUNCTION_0 expr GET_ITER CALL_FUNCTION_1
|
genexpr ::= LOAD_GENEXPR MAKE_FUNCTION_0 expr GET_ITER CALL_FUNCTION_1
|
||||||
'''
|
'''
|
||||||
|
|
||||||
def p_import2(self, args):
|
# def p_import2(self, args):
|
||||||
'''
|
# '''
|
||||||
# These might be relevant for only Python 2.0 or so.
|
# # These might be relevant for only Python 2.0 or so.
|
||||||
# Not relevant for Python 3.
|
# importstar ::= LOAD_CONST LOAD_CONST IMPORT_NAME_CONT IMPORT_STAR
|
||||||
importstar ::= LOAD_CONST LOAD_CONST IMPORT_NAME_CONT IMPORT_STAR
|
# importfrom ::= LOAD_CONST LOAD_CONST IMPORT_NAME_CONT importlist2 POP_TOP
|
||||||
importfrom ::= LOAD_CONST LOAD_CONST IMPORT_NAME_CONT importlist2 POP_TOP
|
# import_as_cont ::= IMPORT_NAME_CONT designator
|
||||||
'''
|
# import_as_cont ::= IMPORT_NAME_CONT load_attrs designator
|
||||||
|
# '''
|
||||||
|
|
||||||
|
|
||||||
def p_expr2(self, args):
|
def p_expr2(self, args):
|
||||||
'''
|
"""
|
||||||
expr ::= LOAD_LOCALS
|
expr ::= LOAD_LOCALS
|
||||||
|
expr ::= slice0
|
||||||
|
expr ::= slice1
|
||||||
|
expr ::= slice2
|
||||||
|
expr ::= slice3
|
||||||
|
expr ::= unary_convert
|
||||||
|
|
||||||
slice0 ::= expr SLICE+0
|
slice0 ::= expr SLICE+0
|
||||||
slice0 ::= expr DUP_TOP SLICE+0
|
slice0 ::= expr DUP_TOP SLICE+0
|
||||||
@@ -223,10 +236,38 @@ class Python2Parser(PythonParser):
|
|||||||
slice2 ::= expr expr DUP_TOPX_2 SLICE+2
|
slice2 ::= expr expr DUP_TOPX_2 SLICE+2
|
||||||
slice3 ::= expr expr expr SLICE+3
|
slice3 ::= expr expr expr SLICE+3
|
||||||
slice3 ::= expr expr expr DUP_TOPX_3 SLICE+3
|
slice3 ::= expr expr expr DUP_TOPX_3 SLICE+3
|
||||||
|
unary_convert ::= expr UNARY_CONVERT
|
||||||
|
|
||||||
# In Python 3, DUP_TOPX_2 is DUP_TOP_TWO
|
# In Python 3, DUP_TOPX_2 is DUP_TOP_TWO
|
||||||
binary_subscr2 ::= expr expr DUP_TOPX_2 BINARY_SUBSCR
|
binary_subscr2 ::= expr expr DUP_TOPX_2 BINARY_SUBSCR
|
||||||
'''
|
"""
|
||||||
|
|
||||||
|
def p_slice2(self, args):
|
||||||
|
"""
|
||||||
|
designator ::= expr STORE_SLICE+0
|
||||||
|
designator ::= expr expr STORE_SLICE+1
|
||||||
|
designator ::= expr expr STORE_SLICE+2
|
||||||
|
designator ::= expr expr expr STORE_SLICE+3
|
||||||
|
augassign1 ::= expr expr inplace_op ROT_TWO STORE_SLICE+0
|
||||||
|
augassign1 ::= expr expr inplace_op ROT_THREE STORE_SLICE+1
|
||||||
|
augassign1 ::= expr expr inplace_op ROT_THREE STORE_SLICE+2
|
||||||
|
augassign1 ::= expr expr inplace_op ROT_FOUR STORE_SLICE+3
|
||||||
|
slice0 ::= expr SLICE+0
|
||||||
|
slice0 ::= expr DUP_TOP SLICE+0
|
||||||
|
slice1 ::= expr expr SLICE+1
|
||||||
|
slice1 ::= expr expr DUP_TOPX_2 SLICE+1
|
||||||
|
slice2 ::= expr expr SLICE+2
|
||||||
|
slice2 ::= expr expr DUP_TOPX_2 SLICE+2
|
||||||
|
slice3 ::= expr expr expr SLICE+3
|
||||||
|
slice3 ::= expr expr expr DUP_TOPX_3 SLICE+3
|
||||||
|
"""
|
||||||
|
|
||||||
|
def p_op2(self, args):
|
||||||
|
"""
|
||||||
|
inplace_op ::= INPLACE_DIVIDE
|
||||||
|
binary_op ::= BINARY_DIVIDE
|
||||||
|
binary_subscr2 ::= expr expr DUP_TOPX_2 BINARY_SUBSCR
|
||||||
|
"""
|
||||||
|
|
||||||
def add_custom_rules(self, tokens, customize):
|
def add_custom_rules(self, tokens, customize):
|
||||||
'''
|
'''
|
||||||
@@ -252,12 +293,14 @@ class Python2Parser(PythonParser):
|
|||||||
for opname, v in list(customize.items()):
|
for opname, v in list(customize.items()):
|
||||||
opname_base = opname[:opname.rfind('_')]
|
opname_base = opname[:opname.rfind('_')]
|
||||||
if opname == 'PyPy':
|
if opname == 'PyPy':
|
||||||
self.add_unique_rules([
|
self.addRule("""
|
||||||
'stmt ::= assign3_pypy',
|
stmt ::= assign3_pypy
|
||||||
'stmt ::= assign2_pypy',
|
stmt ::= assign2_pypy
|
||||||
'assign3_pypy ::= expr expr expr designator designator designator',
|
assign3_pypy ::= expr expr expr designator designator designator
|
||||||
'assign2_pypy ::= expr expr designator designator'
|
assign2_pypy ::= expr expr designator designator
|
||||||
], customize)
|
list_compr ::= expr BUILD_LIST_FROM_ARG _for designator list_iter
|
||||||
|
JUMP_BACK
|
||||||
|
""", nop_func)
|
||||||
continue
|
continue
|
||||||
elif opname_base in ('BUILD_LIST', 'BUILD_TUPLE', 'BUILD_SET'):
|
elif opname_base in ('BUILD_LIST', 'BUILD_TUPLE', 'BUILD_SET'):
|
||||||
thousands = (v//1024)
|
thousands = (v//1024)
|
||||||
@@ -331,25 +374,31 @@ class Python2Parser(PythonParser):
 elif opname_base == 'UNPACK_LIST':
     rule = 'unpack_list ::= ' + opname + ' designator'*v
 elif opname_base in ('DUP_TOPX', 'RAISE_VARARGS'):
-    # no need to add a rule
+    # FIXME: remove these conditions if they are not needed.
+    # no longer need to add a rule
     continue
-    # rule = 'dup_topx ::= ' + 'expr '*v + opname
 elif opname_base == 'MAKE_FUNCTION':
     self.addRule('mklambda ::= %s LOAD_LAMBDA %s' %
                  ('pos_arg '*v, opname), nop_func)
     rule = 'mkfunc ::= %s LOAD_CONST %s' % ('expr '*v, opname)
 elif opname_base == 'MAKE_CLOSURE':
     # FIXME: use add_unique_rules to tidy this up.
-    self.addRule('mklambda ::= %s load_closure LOAD_LAMBDA %s' %
-                 ('expr '*v, opname), nop_func)
-    self.addRule('genexpr ::= %s load_closure LOAD_GENEXPR %s expr GET_ITER CALL_FUNCTION_1' %
-                 ('expr '*v, opname), nop_func)
-    self.addRule('setcomp ::= %s load_closure LOAD_SETCOMP %s expr GET_ITER CALL_FUNCTION_1' %
-                 ('expr '*v, opname), nop_func)
-    self.addRule('dictcomp ::= %s load_closure LOAD_DICTCOMP %s expr GET_ITER CALL_FUNCTION_1' %
-                 ('expr '*v, opname), nop_func)
-    rule = 'mkfunc ::= %s load_closure LOAD_CONST %s' % ('expr '*v, opname)
-    # rule = 'mkfunc ::= %s closure_list LOAD_CONST %s' % ('expr '*v, opname)
+    self.add_unique_rules([
+        ('mklambda ::= %s load_closure LOAD_LAMBDA %s' %
+         ('expr '*v, opname)),
+        ('genexpr ::= %s load_closure LOAD_GENEXPR %s expr'
+         ' GET_ITER CALL_FUNCTION_1' %
+         ('expr '*v, opname)),
+        ('setcomp ::= %s load_closure LOAD_SETCOMP %s expr'
+         ' GET_ITER CALL_FUNCTION_1' %
+         ('expr '*v, opname)),
+        ('dictcomp ::= %s load_closure LOAD_DICTCOMP %s expr'
+         ' GET_ITER CALL_FUNCTION_1' %
+         ('expr '*v, opname)),
+        ('mkfunc ::= %s load_closure LOAD_CONST %s' %
+         ('expr '*v, opname))],
+        customize)
+    continue
 elif opname_base in ('CALL_FUNCTION', 'CALL_FUNCTION_VAR',
                      'CALL_FUNCTION_VAR_KW', 'CALL_FUNCTION_KW'):
     args_pos = (v & 0xff) # positional parameters
@@ -360,7 +409,7 @@ class Python2Parser(PythonParser):
         + 'expr ' * nak + opname
 elif opname_base == 'CALL_METHOD':
     # PyPy only - DRY with parse3
     args_pos = (v & 0xff)          # positional parameters
     args_kw = (v >> 8) & 0xff      # keyword parameters
     # number of apply equiv arguments:
     nak = ( len(opname_base)-len('CALL_METHOD') ) // 3
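A worked example of the operand decoding used in the customization code above; it is a stand-alone illustration (the value of v is made up), not part of the diff.

# Low byte = positional-argument count, next byte = keyword-argument count.
v = 0x0203                    # hypothetical customization value
args_pos = v & 0xff           # -> 3 positional arguments
args_kw = (v >> 8) & 0xff     # -> 2 keyword arguments

# "nak" counts the extra *args/**kwargs expressions from the length of the
# opcode-name suffix ("_VAR", "_KW", "_VAR_KW"):
opname_base = 'CALL_FUNCTION_VAR_KW'
nak = (len(opname_base) - len('CALL_FUNCTION')) // 3   # -> 2

assert (args_pos, args_kw, nak) == (3, 2, 2)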
uncompyle6/parsers/parse22.py (new file, 29 lines)
@@ -0,0 +1,29 @@
+# Copyright (c) 2016 Rocky Bernstein
+# Copyright (c) 2000-2002 by hartmut Goebel <hartmut@goebel.noris.de>
+
+from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
+from uncompyle6.parser import PythonParserSingle
+from uncompyle6.parsers.parse23 import Python23Parser
+
+class Python22Parser(Python23Parser):
+
+    def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
+        super(Python23Parser, self).__init__(debug_parser)
+        self.customized = {}
+
+    def p_misc22(self, args):
+        '''
+        _for ::= LOAD_CONST FOR_LOOP
+        '''
+
+class Python22ParserSingle(Python23Parser, PythonParserSingle):
+    pass
+
+if __name__ == '__main__':
+    # Check grammar
+    p = Python22Parser()
+    p.checkGrammar()
+    p.dumpGrammar()
+
+# local variables:
+# tab-width: 4
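A brief, hedged note on the one grammar rule this new parser adds: in Python 2.2 and earlier, for-loops are compiled with the old FOR_LOOP opcode preceded by a LOAD_CONST (the initial loop index, as I read the old bytecode format), which is what _for ::= LOAD_CONST FOR_LOOP matches. Perfectly ordinary source exercises it:

# Any plain for-loop in <= 2.2 bytecode should reduce via the _for rule above.
total = 0
for i in [1, 2, 3]:
    total += i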
@@ -14,9 +14,11 @@ class Python23Parser(Python24Parser):

 def p_misc23(self, args):
     '''
-    _while1test ::= JUMP_FORWARD JUMP_IF_FALSE POP_TOP COME_FROM
+    # Used to keep semantic positions the same across later versions
+    # of Python
+    _while1test ::= SETUP_LOOP JUMP_FORWARD JUMP_IF_FALSE POP_TOP COME_FROM
+
-    while1stmt ::= SETUP_LOOP _while1test l_stmts JUMP_BACK
+    while1stmt ::= _while1test l_stmts_opt JUMP_BACK
                    COME_FROM POP_TOP POP_BLOCK COME_FROM

     list_compr ::= BUILD_LIST_0 DUP_TOP LOAD_ATTR designator list_iter del_stmt
@@ -26,6 +26,7 @@ class Python24Parser(Python25Parser):

     # Python 2.5+ omits POP_TOP POP_BLOCK
     while1stmt ::= SETUP_LOOP l_stmts JUMP_BACK POP_TOP POP_BLOCK COME_FROM
+    while1stmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK POP_TOP POP_BLOCK COME_FROM

     # Python 2.5+:
     # call_stmt ::= expr POP_TOP
@@ -14,7 +14,6 @@ class Python25Parser(Python26Parser):

 def p_misc25(self, args):
     '''

     # If "return_if_stmt" is in a loop, a JUMP_BACK can be emitted. In 2.6 the
     # JUMP_BACK doesn't appear

@@ -157,6 +157,8 @@ class Python26Parser(Python2Parser):
     iflaststmtl ::= testexpr c_stmts_opt JUMP_BACK come_from_pop
     iflaststmt ::= testexpr c_stmts_opt JUMP_ABSOLUTE come_from_pop

+    while1stmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK COME_FROM
+
     # Common with 2.7
     while1stmt ::= SETUP_LOOP return_stmts bp_come_from
     while1stmt ::= SETUP_LOOP return_stmts COME_FROM
@@ -201,7 +203,6 @@ class Python26Parser(Python2Parser):
     ret_cond_not ::= expr jmp_true expr RETURN_END_IF come_from_pop ret_expr_or_cond

     # FIXME: split into Python 2.5
-    ret_cond ::= expr jmp_false expr JUMP_RETURN come_from_pop ret_expr_or_cond
     ret_or ::= expr jmp_true ret_expr_or_cond come_froms
     '''

@@ -224,3 +225,20 @@ if __name__ == '__main__':
     # Check grammar
     p = Python26Parser()
     p.checkGrammar()
+    from uncompyle6 import PYTHON_VERSION, IS_PYPY
+    if PYTHON_VERSION == 2.6:
+        lhs, rhs, tokens, right_recursive = p.checkSets()
+        from uncompyle6.scanner import get_scanner
+        s = get_scanner(PYTHON_VERSION, IS_PYPY)
+        opcode_set = set(s.opc.opname).union(set(
+            """JUMP_BACK CONTINUE RETURN_END_IF COME_FROM
+               LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP
+               LAMBDA_MARKER RETURN_LAST
+            """.split()))
+        remain_tokens = set(tokens) - opcode_set
+        import re
+        remain_tokens = set([re.sub('_\d+$','', t) for t in remain_tokens])
+        remain_tokens = set([re.sub('_CONT$','', t) for t in remain_tokens])
+        remain_tokens = set(remain_tokens) - opcode_set
+        print(remain_tokens)
+        # print(sorted(p.rule2name.items()))
@@ -12,11 +12,17 @@ class Python27Parser(Python2Parser):
         super(Python27Parser, self).__init__(debug_parser)
         self.customized = {}

-    def p_list_comprehension27(self, args):
+    def p_comprehension27(self, args):
         """
         list_for ::= expr _for designator list_iter JUMP_BACK

-        list_compr ::= expr BUILD_LIST_FROM_ARG _for designator list_iter JUMP_BACK
+        setcomp_func ::= BUILD_SET_0 LOAD_FAST FOR_ITER designator comp_iter
+                         JUMP_BACK RETURN_VALUE RETURN_LAST
+
+        dict_comp_body ::= expr expr MAP_ADD
+        set_comp_body ::= expr SET_ADD
+
+        # See also common Python p_list_comprehension
         """

     def p_try27(self, args):
@@ -39,11 +45,27 @@ class Python27Parser(Python2Parser):
     def p_jump27(self, args):
         """
         _ifstmts_jump ::= c_stmts_opt JUMP_FORWARD COME_FROM
+        bp_come_from ::= POP_BLOCK COME_FROM
+
+        # FIXME: Common with 3.0+
         jmp_false ::= POP_JUMP_IF_FALSE
         jmp_true ::= POP_JUMP_IF_TRUE
-        bp_come_from ::= POP_BLOCK COME_FROM
-        """
+
+        ret_and ::= expr JUMP_IF_FALSE_OR_POP ret_expr_or_cond COME_FROM
+        ret_or ::= expr JUMP_IF_TRUE_OR_POP ret_expr_or_cond COME_FROM
+        ret_cond ::= expr POP_JUMP_IF_FALSE expr RETURN_END_IF ret_expr_or_cond
+        ret_cond_not ::= expr POP_JUMP_IF_TRUE expr RETURN_END_IF ret_expr_or_cond
+
+        or ::= expr JUMP_IF_TRUE_OR_POP expr COME_FROM
+        and ::= expr JUMP_IF_FALSE_OR_POP expr COME_FROM
+
+        cmp_list1 ::= expr DUP_TOP ROT_THREE
+                      COMPARE_OP JUMP_IF_FALSE_OR_POP
+                      cmp_list1 COME_FROM
+        cmp_list1 ::= expr DUP_TOP ROT_THREE
+                      COMPARE_OP JUMP_IF_FALSE_OR_POP
+                      cmp_list2 COME_FROM
+        """

     def p_stmt27(self, args):
         """
@@ -61,6 +83,8 @@ class Python27Parser(Python2Parser):
                      POP_BLOCK LOAD_CONST COME_FROM
                      WITH_CLEANUP END_FINALLY

+        while1stmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK POP_BLOCK COME_FROM
+
         # Common with 2.6
         while1stmt ::= SETUP_LOOP return_stmts bp_come_from
         while1stmt ::= SETUP_LOOP return_stmts COME_FROM
@@ -73,3 +97,20 @@ if __name__ == '__main__':
     # Check grammar
     p = Python27Parser()
     p.checkGrammar()
+    from uncompyle6 import PYTHON_VERSION, IS_PYPY
+    if PYTHON_VERSION == 2.7:
+        lhs, rhs, tokens, right_recursive = p.checkSets()
+        from uncompyle6.scanner import get_scanner
+        s = get_scanner(PYTHON_VERSION, IS_PYPY)
+        opcode_set = set(s.opc.opname).union(set(
+            """JUMP_BACK CONTINUE RETURN_END_IF COME_FROM
+               LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP
+               LAMBDA_MARKER RETURN_LAST
+            """.split()))
+        remain_tokens = set(tokens) - opcode_set
+        import re
+        remain_tokens = set([re.sub('_\d+$','', t) for t in remain_tokens])
+        remain_tokens = set([re.sub('_CONT$','', t) for t in remain_tokens])
+        remain_tokens = set(remain_tokens) - opcode_set
+        print(remain_tokens)
+        # p.dumpGrammar()
@@ -17,22 +17,18 @@ that a later phase can turn into a sequence of ASCII text.

 from __future__ import print_function

-from uncompyle6.parser import PythonParser, PythonParserSingle
+from uncompyle6.parser import PythonParser, PythonParserSingle, nop_func
 from uncompyle6.parsers.astnode import AST
 from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
-from uncompyle6 import PYTHON3

 class Python3Parser(PythonParser):

     def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
         self.added_rules = set()
-        if PYTHON3:
-            super().__init__(AST, 'stmts', debug=debug_parser)
-        else:
-            super(Python3Parser, self).__init__(AST, 'stmts', debug=debug_parser)
+        super(Python3Parser, self).__init__(AST, 'stmts', debug=debug_parser)
         self.new_rules = set()

-    def p_list_comprehension3(self, args):
+    def p_comprehension3(self, args):
         """
         # Python3 scanner adds LOAD_LISTCOMP. Python3 does list comprehension like
         # other comprehensions (set, dictionary).
@@ -51,6 +47,11 @@ class Python3Parser(PythonParser):
     jb_or_c ::= JUMP_BACK
     jb_or_c ::= CONTINUE

+    setcomp_func ::= BUILD_SET_0 LOAD_FAST FOR_ITER designator comp_iter
+                     JUMP_BACK RETURN_VALUE RETURN_LAST
+    dict_comp_body ::= expr expr MAP_ADD
+    set_comp_body ::= expr SET_ADD
+
     # See also common Python p_list_comprehension
     """

@@ -89,10 +90,6 @@ class Python3Parser(PythonParser):
     raise_stmt2 ::= expr expr RAISE_VARARGS_2
     raise_stmt3 ::= expr expr expr RAISE_VARARGS_3

-    stmt ::= exec_stmt
-    exec_stmt ::= expr exprlist DUP_TOP EXEC_STMT
-    exec_stmt ::= expr exprlist EXEC_STMT
-
     stmt ::= assert
     stmt ::= assert2
     stmt ::= ifstmt
@@ -114,10 +111,6 @@ class Python3Parser(PythonParser):
     del_stmt ::= DELETE_FAST
     del_stmt ::= DELETE_NAME
     del_stmt ::= DELETE_GLOBAL
-    del_stmt ::= expr DELETE_SLICE+0
-    del_stmt ::= expr expr DELETE_SLICE+1
-    del_stmt ::= expr expr DELETE_SLICE+2
-    del_stmt ::= expr expr expr DELETE_SLICE+3
     del_stmt ::= delete_subscr
     delete_subscr ::= expr expr DELETE_SUBSCR
     del_stmt ::= expr DELETE_ATTR
@@ -262,6 +255,22 @@ class Python3Parser(PythonParser):
     come_froms ::= COME_FROM
     jmp_false ::= POP_JUMP_IF_FALSE
     jmp_true ::= POP_JUMP_IF_TRUE

+    # FIXME: Common with 2.7
+    ret_and ::= expr JUMP_IF_FALSE_OR_POP ret_expr_or_cond COME_FROM
+    ret_or ::= expr JUMP_IF_TRUE_OR_POP ret_expr_or_cond COME_FROM
+    ret_cond ::= expr POP_JUMP_IF_FALSE expr RETURN_END_IF ret_expr_or_cond
+    ret_cond_not ::= expr POP_JUMP_IF_TRUE expr RETURN_END_IF ret_expr_or_cond
+
+    or ::= expr JUMP_IF_TRUE_OR_POP expr COME_FROM
+    and ::= expr JUMP_IF_FALSE_OR_POP expr COME_FROM
+
+    cmp_list1 ::= expr DUP_TOP ROT_THREE
+                  COMPARE_OP JUMP_IF_FALSE_OR_POP
+                  cmp_list1 COME_FROM
+    cmp_list1 ::= expr DUP_TOP ROT_THREE
+                  COMPARE_OP JUMP_IF_FALSE_OR_POP
+                  cmp_list2 COME_FROM
     """

 def p_stmt3(self, args):
@@ -356,7 +365,7 @@ class Python3Parser(PythonParser):
     call_function ::= expr {expr}^n CALL_FUNCTION_KW_n POP_TOP

     classdefdeco2 ::= LOAD_BUILD_CLASS mkfunc {expr}^n-1 CALL_FUNCTION_n
     """
     # Low byte indicates number of positional paramters,
     # high byte number of positional parameters
     args_pos = token.attr & 0xff
@@ -435,13 +444,33 @@ class Python3Parser(PythonParser):
     For PYPY:
     load_attr ::= expr LOOKUP_METHOD
     call_function ::= expr CALL_METHOD
     """
+    saw_format_value = False
     for i, token in enumerate(tokens):
         opname = token.type
         opname_base = opname[:opname.rfind('_')]

-        if opname in ('CALL_FUNCTION', 'CALL_FUNCTION_VAR',
-                      'CALL_FUNCTION_VAR_KW', 'CALL_FUNCTION_KW'):
+        if opname == 'PyPy':
+            self.addRule("""
+                stmt ::= assign3_pypy
+                stmt ::= assign2_pypy
+                assign3_pypy ::= expr expr expr designator designator designator
+                assign2_pypy ::= expr expr designator designator
+                """, nop_func)
+            continue
+        elif opname == 'FORMAT_VALUE':
+            # Python 3.6+
+            self.addRule("""
+                formatted_value ::= expr FORMAT_VALUE
+                formatted_value ::= expr FORMAT_VALUE
+                str ::= LOAD_CONST
+                formatted_value_or_str ::= formatted_value
+                formatted_value_or_str ::= str
+                """, nop_func)
+            saw_format_value = True
+
+        elif opname in ('CALL_FUNCTION', 'CALL_FUNCTION_VAR',
+                        'CALL_FUNCTION_VAR_KW', 'CALL_FUNCTION_KW'):
             self.custom_classfunc_rule(opname, token, customize)
         elif opname == 'LOAD_DICTCOMP':
             rule_pat = ("dictcomp ::= LOAD_DICTCOMP %sMAKE_FUNCTION_0 expr "
@@ -462,6 +491,16 @@ class Python3Parser(PythonParser):
             if opname_base == 'BUILD_TUPLE':
                 rule = ('load_closure ::= %s%s' % (('LOAD_CLOSURE ' * v), opname))
                 self.add_unique_rule(rule, opname, token.attr, customize)
+            if opname_base == 'BUILD_LIST' and saw_format_value:
+                saw_format_value = False
+                format_or_str_n = "formatted_value_or_str_%s" % v
+                self.addRule("""
+                    expr ::= joined_str
+                    joined_str ::= LOAD_CONST LOAD_ATTR %s CALL_FUNCTION_1
+                    %s ::= %s%s
+                    """ % (format_or_str_n, format_or_str_n, ("formatted_value_or_str " *v), opname),
+                    nop_func)
+
         elif opname == 'LOOKUP_METHOD':
             # A PyPy speciality - DRY with parse2
             self.add_unique_rule("load_attr ::= expr LOOKUP_METHOD",
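A hedged illustration of the Python 3.6 source these FORMAT_VALUE / joined_str customizations are aimed at; the joined_str rule itself spells out the LOAD_CONST '' / LOAD_ATTR join / BUILD_LIST / CALL_FUNCTION_1 shape the compiler emits, and the example below is only meant to show what kind of source produces it.

# Each interpolated expression becomes a FORMAT_VALUE (formatted_value);
# when several pieces are concatenated, the parts are collected and joined,
# which is what joined_str describes.
name = "world"
greeting = f"hello {name}"                   # single formatted_value
banner = f"<{name}> has {len(name)} chars"   # several formatted_value_or_str parts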
@@ -542,14 +581,17 @@ class Python3Parser(PythonParser):
             self.add_unique_rule(rule, opname, token.attr, customize)
         elif opname_base == 'CALL_METHOD':
             # PyPy only - DRY with parse2

+            # FIXME: The below argument parsing will be wrong when PyPy gets to 3.6
             args_pos = (token.attr & 0xff)      # positional parameters
             args_kw = (token.attr >> 8) & 0xff  # keyword parameters

             # number of apply equiv arguments:
             nak = ( len(opname_base)-len('CALL_METHOD') ) // 3
             rule = ('call_function ::= expr '
                     + ('pos_arg ' * args_pos)
                     + ('kwarg ' * args_kw)
-                    + 'expr ' * nak + token.type)
+                    + 'expr ' * nak + opname)
             self.add_unique_rule(rule, opname, token.attr, customize)
         elif opname.startswith('MAKE_CLOSURE'):
             # DRY with MAKE_FUNCTION
@@ -594,6 +636,7 @@ class Python3Parser(PythonParser):


 class Python32Parser(Python3Parser):

     def p_32(self, args):
         """
         # Store locals is only in Python 3.0 to 3.3
@@ -613,49 +656,6 @@ class Python33Parser(Python3Parser):
     yield_from ::= expr expr YIELD_FROM
     """

-class Python35onParser(Python3Parser):
-    def p_35on(self, args):
-        """
-        # Python 3.5+ has WITH_CLEANUP_START/FINISH
-
-        withstmt ::= expr SETUP_WITH exprlist suite_stmts_opt
-                     POP_BLOCK LOAD_CONST COME_FROM
-                     WITH_CLEANUP_START WITH_CLEANUP_FINISH END_FINALLY
-
-        withstmt ::= expr SETUP_WITH POP_TOP suite_stmts_opt
-                     POP_BLOCK LOAD_CONST COME_FROM
-                     WITH_CLEANUP_START WITH_CLEANUP_FINISH END_FINALLY
-
-        withasstmt ::= expr SETUP_WITH designator suite_stmts_opt
-                     POP_BLOCK LOAD_CONST COME_FROM
-                     WITH_CLEANUP_START WITH_CLEANUP_FINISH END_FINALLY
-
-        inplace_op ::= INPLACE_MATRIX_MULTIPLY
-        binary_op ::= BINARY_MATRIX_MULTIPLY
-
-        # Python 3.5+ does jump optimization
-        # In <.3.5 the below is a JUMP_FORWARD to a JUMP_ABSOLUTE.
-        # in return_stmt, we will need the semantic actions in pysource.py
-        # to work out whether to dedent or not based on the presence of
-        # RETURN_END_IF vs RETURN_VALUE
-
-        ifelsestmtc ::= testexpr c_stmts_opt JUMP_FORWARD else_suitec
-        return_stmt ::= ret_expr RETURN_END_IF
-
-
-        # Python 3.3+ also has yield from. 3.5 does it
-        # differently than 3.3, 3.4
-
-        expr ::= yield_from
-        yield_from ::= expr GET_YIELD_FROM_ITER LOAD_CONST YIELD_FROM
-
-        # Python 3.4+ has more loop optimization that removes
-        # JUMP_FORWARD in some cases, and hence we also don't
-        # see COME_FROM
-        _ifstmts_jump ::= c_stmts_opt
-
-        """

 class Python3ParserSingle(Python3Parser, PythonParserSingle):
     pass

@@ -667,18 +667,15 @@ class Python32ParserSingle(Python32Parser, PythonParserSingle):
 class Python33ParserSingle(Python33Parser, PythonParserSingle):
     pass

-class Python35onParserSingle(Python35onParser, PythonParserSingle):
-    pass
-
 def info(args):
     # Check grammar
     # Should also add a way to dump grammar
-    import sys
     p = Python3Parser()
     if len(args) > 0:
         arg = args[0]
         if arg == '3.5':
-            p = Python35onParser()
+            from uncompyle6.parser.parse35 import Python35Parser
+            p = Python35Parser()
         elif arg == '3.3':
             p = Python33Parser()
         elif arg == '3.2':
@@ -46,13 +46,24 @@ class Python34ParserSingle(Python34Parser, PythonParserSingle):
 pass


-def info(args):
+if __name__ == '__main__':
     # Check grammar
-    # Should also add a way to dump grammar
     p = Python34Parser()
     p.checkGrammar()
-
-if __name__ == '__main__':
-    import sys
-    info(sys.argv)
+    from uncompyle6 import PYTHON_VERSION, IS_PYPY
+    if PYTHON_VERSION == 3.4:
+        lhs, rhs, tokens, right_recursive = p.checkSets()
+        from uncompyle6.scanner import get_scanner
+        s = get_scanner(PYTHON_VERSION, IS_PYPY)
+        opcode_set = set(s.opc.opname).union(set(
+            """JUMP_BACK CONTINUE RETURN_END_IF COME_FROM
+               LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP LOAD_CLASSNAME
+               LAMBDA_MARKER RETURN_LAST
+            """.split()))
+        remain_tokens = set(tokens) - opcode_set
+        import re
+        remain_tokens = set([re.sub('_\d+$','', t) for t in remain_tokens])
+        remain_tokens = set([re.sub('_CONT$','', t) for t in remain_tokens])
+        remain_tokens = set(remain_tokens) - opcode_set
+        print(remain_tokens)
+        # print(sorted(p.rule2name.items()))
uncompyle6/parsers/parse35.py (new file, 78 lines)
@@ -0,0 +1,78 @@
+# Copyright (c) 2016 Rocky Bernstein
+"""
+spark grammar differences over Python3 for Python 3.5.
+"""
+from __future__ import print_function
+
+from uncompyle6.parser import PythonParserSingle
+from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
+from uncompyle6.parsers.parse3 import Python3Parser
+
+class Python35Parser(Python3Parser):
+
+    def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
+        super(Python35Parser, self).__init__(debug_parser)
+        self.customized = {}
+
+    def p_35on(self, args):
+        """
+        # Python 3.5+ has WITH_CLEANUP_START/FINISH
+
+        withstmt ::= expr SETUP_WITH exprlist suite_stmts_opt
+                     POP_BLOCK LOAD_CONST COME_FROM
+                     WITH_CLEANUP_START WITH_CLEANUP_FINISH END_FINALLY
+
+        withstmt ::= expr SETUP_WITH POP_TOP suite_stmts_opt
+                     POP_BLOCK LOAD_CONST COME_FROM
+                     WITH_CLEANUP_START WITH_CLEANUP_FINISH END_FINALLY
+
+        withasstmt ::= expr SETUP_WITH designator suite_stmts_opt
+                     POP_BLOCK LOAD_CONST COME_FROM
+                     WITH_CLEANUP_START WITH_CLEANUP_FINISH END_FINALLY
+
+        inplace_op ::= INPLACE_MATRIX_MULTIPLY
+        binary_op ::= BINARY_MATRIX_MULTIPLY
+
+        # Python 3.5+ does jump optimization
+        # In <.3.5 the below is a JUMP_FORWARD to a JUMP_ABSOLUTE.
+        # in return_stmt, we will need the semantic actions in pysource.py
+        # to work out whether to dedent or not based on the presence of
+        # RETURN_END_IF vs RETURN_VALUE
+
+        ifelsestmtc ::= testexpr c_stmts_opt JUMP_FORWARD else_suitec
+
+        # Python 3.3+ also has yield from. 3.5 does it
+        # differently than 3.3, 3.4
+
+        expr ::= yield_from
+        yield_from ::= expr GET_YIELD_FROM_ITER LOAD_CONST YIELD_FROM
+
+        # Python 3.4+ has more loop optimization that removes
+        # JUMP_FORWARD in some cases, and hence we also don't
+        # see COME_FROM
+        _ifstmts_jump ::= c_stmts_opt
+        """
+class Python35ParserSingle(Python35Parser, PythonParserSingle):
+    pass
+
+if __name__ == '__main__':
+    # Check grammar
+    p = Python35Parser()
+    p.checkGrammar()
+    from uncompyle6 import PYTHON_VERSION, IS_PYPY
+    if PYTHON_VERSION == 3.5:
+        lhs, rhs, tokens, right_recursive = p.checkSets()
+        from uncompyle6.scanner import get_scanner
+        s = get_scanner(PYTHON_VERSION, IS_PYPY)
+        opcode_set = set(s.opc.opname).union(set(
+            """JUMP_BACK CONTINUE RETURN_END_IF COME_FROM
+               LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP LOAD_CLASSNAME
+               LAMBDA_MARKER RETURN_LAST
+            """.split()))
+        remain_tokens = set(tokens) - opcode_set
+        import re
+        remain_tokens = set([re.sub('_\d+$','', t) for t in remain_tokens])
+        remain_tokens = set([re.sub('_CONT$','', t) for t in remain_tokens])
+        remain_tokens = set(remain_tokens) - opcode_set
+        print(remain_tokens)
+        # print(sorted(p.rule2name.items()))
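A hedged sketch of 3.5-specific source constructs the new grammar above is meant to cover: the matrix-multiply operator (PEP 465) behind BINARY_MATRIX_MULTIPLY / INPLACE_MATRIX_MULTIPLY and the reworked yield-from sequence. The class below is invented purely so the example runs.

class M:
    def __init__(self, v): self.v = v
    def __matmul__(self, other): return M(self.v * other.v)

a, b = M(2), M(3)
c = a @ b          # binary_op ::= BINARY_MATRIX_MULTIPLY

def chain(xs, ys):
    # 3.5 compiles this with GET_YIELD_FROM_ITER, matching the
    # yield_from rule in p_35on above.
    yield from xs
    yield from ys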
uncompyle6/parsers/parse36.py (new file, 52 lines)
@@ -0,0 +1,52 @@
+# Copyright (c) 2016 Rocky Bernstein
+"""
+spark grammar differences over Python 3.5 for Python 3.6.
+"""
+from __future__ import print_function
+
+from uncompyle6.parser import PythonParserSingle
+from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
+from uncompyle6.parsers.parse35 import Python35Parser
+
+class Python36Parser(Python35Parser):
+
+    def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
+        super(Python36Parser, self).__init__(debug_parser)
+        self.customized = {}
+
+    def p_36misc(self, args):
+        """
+        formatted_value ::= LOAD_FAST FORMAT_VALUE
+        str ::= LOAD_CONST
+        joined_str ::= LOAD_CONST LOAD_ATTR format_value_or_strs
+                       BUILD_LIST CALL_FUNCTION
+        format_value_or_strs ::= format_value_or_strs format_value_or_str
+        format_value_or_strs ::= format_value_or_str
+        format_value_or_str ::= format_value
+        format_value_or_str ::= str
+        """
+
+class Python36ParserSingle(Python36Parser, PythonParserSingle):
+    pass
+
+if __name__ == '__main__':
+    # Check grammar
+    p = Python36Parser()
+    p.checkGrammar()
+    from uncompyle6 import PYTHON_VERSION, IS_PYPY
+    if PYTHON_VERSION == 3.6:
+        lhs, rhs, tokens, right_recursive = p.checkSets()
+        from uncompyle6.scanner import get_scanner
+        s = get_scanner(PYTHON_VERSION, IS_PYPY)
+        opcode_set = set(s.opc.opname).union(set(
+            """JUMP_BACK CONTINUE RETURN_END_IF COME_FROM
+               LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP LOAD_CLASSNAME
+               LAMBDA_MARKER RETURN_LAST
+            """.split()))
+        remain_tokens = set(tokens) - opcode_set
+        import re
+        remain_tokens = set([re.sub('_\d+$','', t) for t in remain_tokens])
+        remain_tokens = set([re.sub('_CONT$','', t) for t in remain_tokens])
+        remain_tokens = set(remain_tokens) - opcode_set
+        print(remain_tokens)
+        # print(sorted(p.rule2name.items()))
@@ -18,11 +18,11 @@ from __future__ import print_function

 import sys

-from uncompyle6 import PYTHON3
+from uncompyle6 import PYTHON3, IS_PYPY
 from uncompyle6.scanners.tok import Token

 # The byte code versions we support
-PYTHON_VERSIONS = (2.3, 2.4, 2.5, 2.6, 2.7, 3.2, 3.3, 3.4, 3.5)
+PYTHON_VERSIONS = (2.2, 2.3, 2.4, 2.5, 2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6)

 # FIXME: DRY
 if PYTHON3:
@@ -251,11 +251,13 @@ class Scanner(object):
         self.Token = tokenClass
         return self.Token

+def op_has_argument(op, opc):
+    return op >= opc.HAVE_ARGUMENT
+
 def parse_fn_counts(argc):
     return ((argc & 0xFF), (argc >> 8) & 0xFF, (argc >> 16) & 0x7FFF)

-def get_scanner(version, show_asm=None, is_pypy=False):
+def get_scanner(version, is_pypy=False, show_asm=None):
     # Pick up appropriate scanner
     if version in PYTHON_VERSIONS:
         v_str = "%s" % (int(version * 10))
@@ -282,5 +284,5 @@ def get_scanner(version, show_asm=None, is_pypy=False):
 if __name__ == "__main__":
     import inspect, uncompyle6
     co = inspect.currentframe().f_code
-    scanner = get_scanner(uncompyle6.PYTHON_VERSION, True)
+    scanner = get_scanner(uncompyle6.PYTHON_VERSION, IS_PYPY, True)
     tokens, customize = scanner.disassemble(co, {})
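A quick, hedged illustration of the helpers above and of the reordered get_scanner() signature. The argc value is made up, and the split into defaults/keyword-only defaults/annotations is my reading of the formula in parse_fn_counts.

# Same arithmetic as parse_fn_counts(argc):
argc = 0x00010002
pos_defaults = argc & 0xFF            # -> 2
kw_defaults = (argc >> 8) & 0xFF      # -> 0
annotations = (argc >> 16) & 0x7FFF   # -> 1
assert (pos_defaults, kw_defaults, annotations) == (2, 0, 1)

# get_scanner() now takes is_pypy before show_asm, so callers look like:
#     scanner = get_scanner(2.7, is_pypy=False, show_asm=None)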
@@ -218,8 +218,19 @@ class Scanner2(scan.Scanner):
                 # in arbitrary value 0.
                 customize[opname] = 0
             elif op == self.opc.JUMP_ABSOLUTE:
+                # Further classify JUMP_ABSOLUTE into backward jumps
+                # which are used in loops, and "CONTINUE" jumps which
+                # may appear in a "continue" statement. The loop-type
+                # and continue-type jumps will help us classify loop
+                # boundaries The continue-type jumps help us get
+                # "continue" statements with would otherwise be turned
+                # into a "pass" statement because JUMPs are sometimes
+                # ignored in rules as just boundary overhead. In
+                # comprehensions we might sometimes classify JUMP_BACK
+                # as CONTINUE, but that's okay since we add a grammar
+                # rule for that.
                 target = self.get_target(offset)
-                if target < offset:
+                if target <= offset:
                     if (offset in self.stmts
                         and self.code[offset+3] not in (self.opc.END_FINALLY,
                                                         self.opc.POP_BLOCK)
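The long comment above is easier to follow with a concrete, hedged example of the source pattern involved:

# The "continue" below compiles to a JUMP_ABSOLUTE whose target is at or
# before its own offset (a backward jump). Classifying that jump as CONTINUE
# rather than a plain JUMP_ABSOLUTE is what lets the decompiler emit
# "continue" instead of collapsing the branch into "pass".
for n in range(10):
    if n % 2:
        continue
    print(n)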
@@ -253,7 +264,7 @@ class Scanner2(scan.Scanner):

         if show_asm in ('both', 'after'):
             for t in tokens:
-                print(t.format())
+                print(t)
             print()
         return tokens, customize

@@ -370,7 +381,8 @@ class Scanner2(scan.Scanner):
                 j = self.prev[s]
                 while code[j] == self.opc.JUMP_ABSOLUTE:
                     j = self.prev[j]
-                if code[j] == self.opc.LIST_APPEND: # list comprehension
+                if (self.version >= 2.3 and
+                    code[j] == self.opc.LIST_APPEND): # list comprehension
                     stmts.remove(s)
                     continue
             elif code[s] == self.opc.POP_TOP and code[self.prev[s]] == self.opc.ROT_TWO:
@@ -865,7 +877,7 @@ if __name__ == "__main__":
     from uncompyle6 import PYTHON_VERSION
     tokens, customize = Scanner2(PYTHON_VERSION).disassemble(co)
     for t in tokens:
-        print(t.format())
+        print(t)
 else:
     print("Need to be Python 3.2 or greater to demo; I am %s." %
           PYTHON_VERSION)
uncompyle6/scanners/scanner22.py (new file, 35 lines)
@@ -0,0 +1,35 @@
+# Copyright (c) 2016 by Rocky Bernstein
+"""
+Python 2.2 bytecode scanner/deparser
+
+This overlaps Python's 2.2's dis module, but it can be run from
+Python 3 and other versions of Python. Also, we save token
+information for later use in deparsing.
+"""
+
+import uncompyle6.scanners.scanner23 as scan
+# from uncompyle6.scanners.scanner26 import disassemble as disassemble26
+
+# bytecode verification, verify(), uses JUMP_OPs from here
+from xdis.opcodes import opcode_22
+JUMP_OPs = opcode_22.JUMP_OPs
+
+# We base this off of 2.3 instead of the other way around
+# because we cleaned things up this way.
+# The history is that 2.7 support is the cleanest,
+# then from that we got 2.6 and so on.
+class Scanner22(scan.Scanner23):
+    def __init__(self, show_asm=False):
+        scan.Scanner23.__init__(self, show_asm)
+        self.opc = opcode_22
+        self.opname = opcode_22.opname
+        self.version = 2.2
+        self.genexpr_name = '<generator expression>';
+        self.parent_injest = self.disassemble
+        self.disassemble = self.disassemble22
+        return
+
+    def disassemble22(self, co, classname=None, code_objects={}, show_asm=None):
+        tokens, customize = self.parent_injest(co, classname, code_objects, show_asm)
+        tokens = [t for t in tokens if t.type != 'SET_LINENO']
+        return tokens, customize
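For orientation, a hedged sketch of what the disassemble22() wrapper above does: it reuses the 2.3 scanner and then drops the SET_LINENO pseudo-tokens that only pre-2.3 bytecode carries. The Tok class below is a stand-in invented for the illustration, not the project's Token class.

class Tok:
    def __init__(self, type_): self.type = type_

tokens = [Tok('SET_LINENO'), Tok('LOAD_CONST'), Tok('RETURN_VALUE')]
# Same filtering expression as in disassemble22():
tokens = [t for t in tokens if t.type != 'SET_LINENO']
assert [t.type for t in tokens] == ['LOAD_CONST', 'RETURN_VALUE']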
@@ -1,8 +1,8 @@
 # Copyright (c) 2016 by Rocky Bernstein
 """
-Python 2.4 bytecode scanner/deparser
+Python 2.3 bytecode scanner/deparser

-This overlaps Python's 2.4's dis module, but it can be run from
+This overlaps Python's 2.3's dis module, but it can be run from
 Python 3 and other versions of Python. Also, we save token
 information for later use in deparsing.
 """
@@ -20,6 +20,8 @@ JUMP_OPs = opcode_23.JUMP_OPs
 class Scanner23(scan.Scanner24):
     def __init__(self, show_asm):
         scan.Scanner24.__init__(self, show_asm)
+        self.opc = opcode_23
+        self.opname = opcode_23.opname
         # These are the only differences in initialization between
         # 2.3-2.6
         self.version = 2.3
@@ -22,6 +22,8 @@ class Scanner24(scan.Scanner25):
         scan.Scanner25.__init__(self, show_asm)
         # These are the only differences in initialization between
         # 2.4, 2.5 and 2.6
+        self.opc = opcode_24
+        self.opname = opcode_24.opname
         self.version = 2.4
         self.genexpr_name = '<generator expression>';
         return
@@ -21,6 +21,8 @@ class Scanner25(scan.Scanner26):
     def __init__(self, show_asm):
         # There are no differences in initialization between
         # 2.5 and 2.6
+        self.opc = opcode_25
+        self.opname = opcode_25.opname
         scan.Scanner26.__init__(self, show_asm)
         self.version = 2.5
         return
@@ -79,6 +79,7 @@ class Scanner26(scan.Scanner2):
         '''

         show_asm = self.show_asm if not show_asm else show_asm
+        # show_asm = 'both'
         if show_asm in ('both', 'before'):
             from xdis.bytecode import Bytecode
             bytecode = Bytecode(co, self.opc)
@@ -160,7 +161,8 @@ class Scanner26(scan.Scanner2):
                                          self.opc.IMPORT_STAR))
        # Changes IMPORT_NAME to IMPORT_NAME_CONT.
        # Possibly a Python 2.0 hangover
-        if len(imports) > 1 and self.version < 2.3:
+        # FIXME: Move into a < 2.2 scanner.
+        if len(imports) > 1 and self.version < 2.2:
            last_import = imports[0]
            for i in imports[1:]:
                if self.lines[last_import].next > i:
@@ -108,7 +108,7 @@ if __name__ == "__main__":
     co = inspect.currentframe().f_code
     tokens, customize = Scanner27().disassemble(co)
     for t in tokens:
-        print(t.format())
+        print(t)
     pass
 else:
     print("Need to be Python 2.7 to demo; I am %s." %
@@ -25,6 +25,7 @@ from __future__ import print_function
 from collections import namedtuple
 from array import array

+from uncompyle6.scanner import Scanner, op_has_argument
 from xdis.code import iscode
 from xdis.bytecode import Bytecode
 from uncompyle6.scanner import Token, parse_fn_counts
@@ -42,9 +43,7 @@ globals().update(op3.opmap)
 # POP_JUMP_IF is used by verify
 POP_JUMP_TF = (POP_JUMP_IF_TRUE, POP_JUMP_IF_FALSE)

-import uncompyle6.scanner as scan
-
-class Scanner3(scan.Scanner):
+class Scanner3(Scanner):

     def __init__(self, version, show_asm=None, is_pypy=False):
         super(Scanner3, self).__init__(version, show_asm, is_pypy)
@@ -130,10 +129,13 @@ class Scanner3(scan.Scanner):
             for instr in bytecode.get_instructions(co):
                 print(instr._disassemble())

-        customize = {}
         # Container for tokens
         tokens = []
+
+        customize = {}
+        if self.is_pypy:
+            customize['PyPy'] = 1;
+
         self.code = array('B', co.co_code)
         self.build_lines_data(co)
         self.build_prev_op()
@@ -231,7 +233,7 @@ class Scanner3(scan.Scanner):
                     offset = inst.offset,
                     linestart = inst.starts_line,
                     op = op,
-                    has_arg = (op >= op3.HAVE_ARGUMENT),
+                    has_arg = op_has_argument(op, op3),
                     opc = self.opc
                     )
                 )
@@ -256,7 +258,7 @@ class Scanner3(scan.Scanner):
                 argval = (before_args, after_args)
                 opname = '%s_%d+%d' % (opname, before_args, after_args)
             elif op == self.opc.JUMP_ABSOLUTE:
-                # Further classifhy JUMP_ABSOLUTE into backward jumps
+                # Further classify JUMP_ABSOLUTE into backward jumps
                 # which are used in loops, and "CONTINUE" jumps which
                 # may appear in a "continue" statement. The loop-type
                 # and continue-type jumps will help us classify loop
@@ -307,7 +309,7 @@ class Scanner3(scan.Scanner):

         if show_asm in ('both', 'after'):
             for t in tokens:
-                print(t.format())
+                print(t)
             print()
         return tokens, customize

@@ -400,7 +402,7 @@ class Scanner3(scan.Scanner):

         # Determine structures and fix jumps in Python versions
         # since 2.3
-        self.detect_structure(offset)
+        self.detect_structure(offset, targets)

         has_arg = (op >= op3.HAVE_ARGUMENT)
         if has_arg:
@@ -514,7 +516,7 @@ class Scanner3(scan.Scanner):
             target += offset + 3
         return target

-    def detect_structure(self, offset):
+    def detect_structure(self, offset, targets):
         """
         Detect structures and their boundaries to fix optimized jumps
         in python2.3+
@@ -732,6 +734,33 @@ class Scanner3(scan.Scanner):
                 self.structs.append({'type': 'if-then',
                                      'start': start,
                                      'end': rtarget})
+                # It is important to distingish if this return is inside some sort
+                # except block return
+                jump_prev = prev_op[offset]
+                if self.is_pypy and code[jump_prev] == self.opc.COMPARE_OP:
+                    if self.opc.cmp_op[code[jump_prev+1]] == 'exception match':
+                        return
+                if self.version >= 3.5:
+                    # Python 3.5 may remove as dead code a JUMP
+                    # instruction after a RETURN_VALUE. So we check
+                    # based on seeing SETUP_EXCEPT various places.
+                    if code[rtarget] == self.opc.SETUP_EXCEPT:
+                        return
+                    # Check that next instruction after pops and jump is
+                    # not from SETUP_EXCEPT
+                    next_op = rtarget
+                    if code[next_op] == self.opc.POP_BLOCK:
+                        next_op += self.op_size(self.code[next_op])
+                    if code[next_op] == self.opc.JUMP_ABSOLUTE:
+                        next_op += self.op_size(self.code[next_op])
+                    if next_op in targets:
+                        for try_op in targets[next_op]:
+                            come_from_op = code[try_op]
+                            if come_from_op == self.opc.SETUP_EXCEPT:
+                                return
+                            pass
+                        pass
+                    pass
                 self.return_end_ifs.add(prev_op[rtarget])

             elif op in self.jump_if_pop:
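A hedged sketch of the pattern the new checks above guard against: a return at the end of an except handler should not be mistaken for the RETURN_END_IF of an ordinary if-then, so the scanner bails out when it sees the exception-match / SETUP_EXCEPT machinery nearby.

def fetch(d, key):
    try:
        return d[key]
    except KeyError:
        # This return sits right after the exception-match test; recording it
        # in return_end_ifs would mis-shape the reconstructed control flow.
        return None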
@@ -801,7 +830,7 @@ if __name__ == "__main__":
     from uncompyle6 import PYTHON_VERSION
     tokens, customize = Scanner3(PYTHON_VERSION).disassemble(co)
     for t in tokens:
-        print(t.format())
+        print(t)
 else:
     print("Need to be Python 3.2 or greater to demo; I am %s." %
           PYTHON_VERSION)
@@ -28,7 +28,7 @@ if __name__ == "__main__":
     co = inspect.currentframe().f_code
     tokens, customize = Scanner35().disassemble(co)
     for t in tokens:
-        print(t.format())
+        print(t)
     pass
 else:
     print("Need to be Python 3.5 to demo; I am %s." %
uncompyle6/scanners/scanner36.py (new file, 35 lines)
@@ -0,0 +1,35 @@
+# Copyright (c) 2016 by Rocky Bernstein
+"""
+Python 3.5 bytecode scanner/deparser
+
+This sets up opcodes Python's 3.5 and calls a generalized
+scanner routine for Python 3.
+"""
+
+from __future__ import print_function
+
+from uncompyle6.scanners.scanner3 import Scanner3
+
+# bytecode verification, verify(), uses JUMP_OPs from here
+from xdis.opcodes import opcode_36 as opc
+JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)
+
+class Scanner36(Scanner3):
+
+    def __init__(self, show_asm=None):
+        Scanner3.__init__(self, 3.6, show_asm)
+        return
+    pass
+
+if __name__ == "__main__":
+    from uncompyle6 import PYTHON_VERSION
+    if PYTHON_VERSION == 3.6:
+        import inspect
+        co = inspect.currentframe().f_code
+        tokens, customize = Scanner36().disassemble(co)
+        for t in tokens:
+            print(t.format())
+        pass
+    else:
+        print("Need to be Python 3.6 to demo; I am %s." %
+              PYTHON_VERSION)
@@ -2,7 +2,7 @@
 # Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
 # Copyright (c) 1999 John Aycock

-import sys
+import re, sys
 from uncompyle6 import PYTHON3

 if PYTHON3:
@@ -29,6 +29,9 @@ class Token:
         self.pattr = pattr
         self.offset = offset
         self.linestart = linestart
+        if has_arg == False:
+            self.attr = None
+            self.pattr = None
         self.opc = opc

     def __eq__(self, o):
@@ -43,13 +46,13 @@ class Token:
     def __repr__(self):
         return str(self.type)

-    def __str__(self):
-        pattr = self.pattr if self.pattr is not None else ''
-        prefix = '\n%3d ' % self.linestart if self.linestart else (' ' * 6)
-        return (prefix +
-                ('%9s %-18s %r' % (self.offset, self.type, pattr)))
+    # def __str__(self):
+    #     pattr = self.pattr if self.pattr is not None else ''
+    #     prefix = '\n%3d ' % self.linestart if self.linestart else (' ' * 6)
+    #     return (prefix +
+    #             ('%9s %-18s %r' % (self.offset, self.type, pattr)))

-    def format(self):
+    def __str__(self):
         prefix = '\n%4d ' % self.linestart if self.linestart else (' ' * 6)
         offset_opname = '%6s %-17s' % (self.offset, self.type)
         if not self.has_arg:
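A small, hedged usage note on the rename above: formatted printing now goes through __str__, while __repr__ stays the bare token type, so callers can simply print the token. The helper below is only a sketch; the tokens argument stands in for whatever a scanner's disassemble() returned.

def dump(tokens):
    for t in tokens:
        # str(t)  -> the "offset  opname  argument" line built above
        # repr(t) -> just the token type, e.g. 'LOAD_CONST'
        print(t)          # uses Token.__str__, formerly Token.format()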
@@ -66,9 +69,12 @@ class Token:
             pattr = "to " + str(self.pattr)
             pass
         elif self.op in self.opc.hascompare:
-            pattr = self.opc.cmp_op[self.attr]
+            if isinstance(self.attr, int):
+                pattr = self.opc.cmp_op[self.attr]
             # And so on. See xdis/bytecode.py get_instructions_bytes
             pass
+        elif re.search('_\d+$', self.type):
+            return "%s%s%s" % (prefix, offset_opname, argstr)
         else:
             pattr = ''
         return "%s%s%s %r" % (prefix, offset_opname, argstr, pattr)
147
uncompyle6/semantics/aligner.py
Normal file
147
uncompyle6/semantics/aligner.py
Normal file
@@ -0,0 +1,147 @@
import sys

from uncompyle6.semantics.pysource import (
    SourceWalker, SourceWalkerError, find_globals, ASSIGN_DOC_STRING, RETURN_NONE)
from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG

class AligningWalker(SourceWalker, object):
    def __init__(self, version, scanner, out, showast=False,
                 debug_parser=PARSER_DEFAULT_DEBUG,
                 compile_mode='exec', is_pypy=False):
        SourceWalker.__init__(self, version, out, scanner, showast, debug_parser,
                              compile_mode, is_pypy)
        self.desired_line_number = 0
        self.current_line_number = 0

    def println(self, *data):
        if data and not(len(data) == 1 and data[0] == ''):
            self.write(*data)

        self.pending_newlines = max(self.pending_newlines, 1)

    def write(self, *data):
        from trepan.api import debug; debug()
        if (len(data) == 1) and data[0] == self.indent:
            diff = max(self.pending_newlines,
                       self.desired_line_number - self.current_line_number)
            self.f.write('\n'*diff)
            self.current_line_number += diff
            self.pending_newlines = 0
        if (len(data) == 0) or (len(data) == 1 and data[0] == ''):
            return

        out = ''.join((str(j) for j in data))
        n = 0
        for i in out:
            if i == '\n':
                n += 1
                if n == len(out):
                    self.pending_newlines = max(self.pending_newlines, n)
                    return
            elif n:
                self.pending_newlines = max(self.pending_newlines, n)
                out = out[n:]
                break
            else:
                break

        if self.pending_newlines > 0:
            diff = max(self.pending_newlines,
                       self.desired_line_number - self.current_line_number)
            self.f.write('\n'*diff)
            self.current_line_number += diff
            self.pending_newlines = 0

        for i in out[::-1]:
            if i == '\n':
                self.pending_newlines += 1
            else:
                break

        if self.pending_newlines:
            out = out[:-self.pending_newlines]
        self.f.write(out)

    def default(self, node):
        mapping = self._get_mapping(node)
        if hasattr(node, 'linestart'):
            if node.linestart:
                self.desired_line_number = node.linestart
        table = mapping[0]
        key = node

        for i in mapping[1:]:
            key = key[i]
            pass

        if key.type in table:
            self.engine(table[key.type], node)
            self.prune()

from xdis.code import iscode
from uncompyle6.scanner import get_scanner
from uncompyle6.show import (
    maybe_show_asm,
)

def align_deparse_code(version, co, out=sys.stderr, showasm=False, showast=False,
                       showgrammar=False, code_objects={}, compile_mode='exec', is_pypy=False):
    """
    disassembles and deparses a given code block 'co'
    """

    assert iscode(co)
    # store final output stream for case of error
    scanner = get_scanner(version, is_pypy=is_pypy)

    tokens, customize = scanner.disassemble(co, code_objects=code_objects)
    maybe_show_asm(showasm, tokens)

    debug_parser = dict(PARSER_DEFAULT_DEBUG)
    if showgrammar:
        debug_parser['reduce'] = showgrammar
        debug_parser['errorstack'] = True

    # Build AST from disassembly.
    deparsed = AligningWalker(version, scanner, out, showast=showast,
                              debug_parser=debug_parser, compile_mode=compile_mode,
                              is_pypy = is_pypy)

    isTopLevel = co.co_name == '<module>'
    deparsed.ast = deparsed.build_ast(tokens, customize, isTopLevel=isTopLevel)

    assert deparsed.ast == 'stmts', 'Should have parsed grammar start'

    del tokens # save memory

    deparsed.mod_globs = find_globals(deparsed.ast, set())

    # convert leading '__doc__ = "..." into doc string
    try:
        if deparsed.ast[0][0] == ASSIGN_DOC_STRING(co.co_consts[0]):
            deparsed.print_docstring('', co.co_consts[0])
            del deparsed.ast[0]
        if deparsed.ast[-1] == RETURN_NONE:
            deparsed.ast.pop() # remove last node
            # todo: if empty, add 'pass'
    except:
        pass

    # What we've been waiting for: Generate source from AST!
    deparsed.gen_source(deparsed.ast, co.co_name, customize)

    for g in deparsed.mod_globs:
        deparsed.write('# global %s ## Warning: Unused global' % g)

    if deparsed.ERROR:
        raise SourceWalkerError("Deparsing stopped due to parse error")
    return deparsed

if __name__ == '__main__':
    def deparse_test(co):
        "This is a docstring"
        sys_version = sys.version_info.major + (sys.version_info.minor / 10.0)
        # deparsed = deparse_code(sys_version, co, showasm=True, showast=True)
        deparsed = align_deparse_code(sys_version, co, showasm=False, showast=False,
                                      showgrammar=False)
        print(deparsed.text)
        return
    deparse_test(deparse_test.__code__)
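A minimal usage sketch for the new module, not part of the commit above. It assumes the file is importable as uncompyle6.semantics.aligner and that the sample function is supplied by the caller. Note that AligningWalker.write() still contains a stray "from trepan.api import debug; debug()" call, so running this as written will stop in the trepan debugger, or fail if trepan is not installed; treat it as illustrative only.

# Illustrative sketch only; see the caveat about the stray trepan debug() call above.
import sys
from uncompyle6.semantics.aligner import align_deparse_code

def sample(a, b):
    # hypothetical function whose code object we deparse
    return a + b

version = sys.version_info.major + (sys.version_info.minor / 10.0)
# Deparsed source is written to `out`; stdout is used here instead of the
# default sys.stderr.
align_deparse_code(version, sample.__code__, out=sys.stdout)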
@@ -34,7 +34,7 @@ For example in:

The node will be associated with the text break, excluding the trailing newline.

-Note we assocate the accumulated text with the node normally, but we just don't
+Note we associate the accumulated text with the node normally, but we just don't
do it recursively which is where offsets are probably located.

2. %b
@@ -55,10 +55,9 @@ from __future__ import print_function

import re, sys

-from uncompyle6 import PYTHON3
+from uncompyle6 import PYTHON3, IS_PYPY
from xdis.code import iscode
from uncompyle6.semantics import pysource
-from uncompyle6.parser import get_python_parser
from uncompyle6 import parser
from uncompyle6.scanner import Token, Code, get_scanner
from uncompyle6.show import (
@@ -78,7 +77,7 @@ else:
    from StringIO import StringIO


-from spark_parser import GenericASTTraversal, GenericASTTraversalPruningException, \
+from spark_parser import GenericASTTraversalPruningException, \
     DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG

from collections import namedtuple
@@ -110,37 +109,19 @@ TABLE_DIRECT_FRAGMENT = {
}


-MAP_DIRECT_FRAGMENT = dict(TABLE_DIRECT, **TABLE_DIRECT_FRAGMENT),
-
-
class FragmentsWalker(pysource.SourceWalker, object):

+    MAP_DIRECT_FRAGMENT = ()
+
    stacked_params = ('f', 'indent', 'isLambda', '_globals')

    def __init__(self, version, scanner, showast=False,
                 debug_parser=PARSER_DEFAULT_DEBUG,
                 compile_mode='exec', is_pypy=False):
-        GenericASTTraversal.__init__(self, ast=None)
-        self.scanner = scanner
-        params = {
-            'f': StringIO(),
-            'indent': '',
-            }
-        self.version = version
-        self.p = get_python_parser(
-            version, dict(debug_parser),
-            compile_mode=compile_mode, is_pypy=is_pypy
-        )
-        self.showast = showast
-        self.params = params
-        self.param_stack = []
-        self.ERROR = None
-        self.prec = 100
-        self.return_none = False
-        self.mod_globs = set()
-        self.currentclass = None
-        self.classes = []
-        self.pending_newlines = 0
+        pysource.SourceWalker.__init__(self, version=version, out=StringIO(),
+                                       scanner=scanner,
+                                       showast=showast, debug_parser=debug_parser,
+                                       compile_mode=compile_mode, is_pypy=is_pypy)

        # hide_internal suppresses displaying the additional instructions that sometimes
        # exist in code but but were not written in the source code.
@@ -150,12 +131,13 @@ class FragmentsWalker(pysource.SourceWalker, object):
        # deparsing we generally do need to see these instructions since we may be stopped
        # at one. So here we do not want to suppress showing such instructions.
        self.hide_internal = False
-
-        self.name = None
        self.offsets = {}
        self.last_finish = -1

+        # FIXME: is there a better way?
+        global MAP_DIRECT_FRAGMENT
+        MAP_DIRECT_FRAGMENT = dict(TABLE_DIRECT, **TABLE_DIRECT_FRAGMENT),
+
    f = property(lambda s: s.params['f'],
                 lambda s, x: s.params.__setitem__('f', x),
                 lambda s: s.params.__delitem__('f'),
@@ -339,7 +321,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
            self.preorder(node[0])
            finish = len(self.f.getvalue())
            if hasattr(node[0], 'offset'):
-                self.set_pos_info(node[0], self.last_finish, )
+                self.set_pos_info(node[0], start, len(self.f.getvalue()))
            self.write(')')
            self.last_finish = finish + 1
        else:
@@ -534,7 +516,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
        self.write(func_name)

        self.indentMore()
-        self.make_function(node, isLambda=False, code_index=code_index)
+        self.make_function(node, isLambda=False, code=code)

        self.set_pos_info(node, start, len(self.f.getvalue()))

@@ -1613,7 +1595,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
            self.set_pos_info(last_node, startnode_start, self.last_finish)
        return

-    def make_function(self, node, isLambda, nested=1, code_index=-2):
+    def make_function(self, node, isLambda, nested=1, code=None):
        """Dump function defintion, doc string, and function body."""

        def build_param(ast, name, default):
@@ -1664,7 +1646,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
        if self.version > 3.0 and isLambda and iscode(node[-3].attr):
            code = node[-3].attr
        else:
-            code = node[code_index].attr
+            code = code.attr

        assert iscode(code)
        code = Code(code, self.scanner, self.currentclass)
@@ -1748,7 +1730,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
        pass

def deparse_code(version, co, out=StringIO(), showasm=False, showast=False,
-                 showgrammar=False):
+                 showgrammar=False, is_pypy=False):
    """
    Convert the code object co into a python source fragment.

@@ -1774,7 +1756,7 @@ def deparse_code(version, co, out=StringIO(), showasm=False, showast=False,

    assert iscode(co)
    # store final output stream for case of error
-    scanner = get_scanner(version)
+    scanner = get_scanner(version, is_pypy=is_pypy)

    tokens, customize = scanner.disassemble(co)

@@ -1816,10 +1798,10 @@ def deparse_code(version, co, out=StringIO(), showasm=False, showast=False,

if __name__ == '__main__':

-    def deparse_test(co):
+    def deparse_test(co, is_pypy=IS_PYPY):
        sys_version = sys.version_info.major + (sys.version_info.minor / 10.0)
        walk = deparse_code(sys_version, co, showasm=False, showast=False,
-                            showgrammar=False)
+                            showgrammar=False, is_pypy=IS_PYPY)
        print("deparsed source")
        print(walk.text, "\n")
        print('------------------------')
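For orientation, a small sketch of how the fragment deparser entry point is called after this change, mirroring the __main__ block in the hunk above. The module path uncompyle6.semantics.fragments and the sample function are assumptions on my part; the diff view does not name the file.

# Sketch of calling the fragment deparser with the new is_pypy keyword.
import sys
from uncompyle6 import IS_PYPY
from uncompyle6.semantics import fragments  # assumed module path

def sample(x):
    # hypothetical function to deparse
    return x * 2

version = sys.version_info.major + (sys.version_info.minor / 10.0)
walk = fragments.deparse_code(version, sample.__code__,
                              showasm=False, showast=False,
                              showgrammar=False, is_pypy=IS_PYPY)
print(walk.text)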
@@ -140,18 +140,6 @@ TABLE_R = {
#   'EXEC_STMT': ( '%|exec %c in %[1]C\n', 0, (0,maxint,', ') ),
}

-if not PYTHON3:
-    TABLE_R.update({
-        'STORE_SLICE+0': ( '%c[:]', 0 ),
-        'STORE_SLICE+1': ( '%c[%p:]', 0, (1, 100) ),
-        'STORE_SLICE+2': ( '%c[:%p]', 0, (1, 100) ),
-        'STORE_SLICE+3': ( '%c[%p:%p]', 0, (1, 100), (2, 100) ),
-        'DELETE_SLICE+0': ( '%|del %c[:]\n', 0 ),
-        'DELETE_SLICE+1': ( '%|del %c[%c:]\n', 0, 1 ),
-        'DELETE_SLICE+2': ( '%|del %c[:%c]\n', 0, 1 ),
-        'DELETE_SLICE+3': ( '%|del %c[%c:%c]\n', 0, 1, 2 ),
-    })
-
TABLE_R0 = {
#    'BUILD_LIST': ( '[%C]', (0,-1,', ') ),
#    'BUILD_TUPLE': ( '(%C)', (0,-1,', ') ),
@@ -246,7 +234,6 @@ TABLE_DIRECT = {
    'assign': ( '%|%c = %p\n', -1, (0, 200) ),
    'augassign1': ( '%|%c %c %c\n', 0, 2, 1),
    'augassign2': ( '%|%c.%[2]{pattr} %c %c\n', 0, -3, -4),
-    # 'dup_topx': ( '%c', 0),
    'designList': ( '%c = %c', 0, -1 ),
    'and': ( '%c and %c', 0, 2 ),
    'ret_and': ( '%c and %c', 0, 2 ),
@@ -274,8 +261,6 @@ TABLE_DIRECT = {
    'kwargs': ( '%D', (0, maxint, ', ') ),
    'importlist2': ( '%C', (0, maxint, ', ') ),

-    'assert': ( '%|assert %c\n' , 0 ),
-    'assert2': ( '%|assert %c, %c\n' , 0, 3 ),
    'assert_expr_or': ( '%c or %c', 0, 2 ),
    'assert_expr_and': ( '%c and %c', 0, 2 ),
    'print_items_stmt': ( '%|print %c%c,\n', 0, 2),
@@ -337,7 +322,6 @@ TABLE_DIRECT = {
    'except_cond2': ( '%|except %c as %c:\n', 1, 5 ),
    'except_suite': ( '%+%c%-%C', 0, (1, maxint, '') ),
    'except_suite_finalize': ( '%+%c%-%C', 1, (3, maxint, '') ),
-    'tryfinallystmt': ( '%|try:\n%+%c%-%|finally:\n%+%c%-\n\n', 1, 5 ),
    'withstmt': ( '%|with %c:\n%+%c%-', 0, 3),
    'withasstmt': ( '%|with %c as %c:\n%+%c%-', 0, 2, 3),
    'passstmt': ( '%|pass\n', ),
@@ -345,53 +329,9 @@ TABLE_DIRECT = {
    'kv': ( '%c: %c', 3, 1 ),
    'kv2': ( '%c: %c', 1, 2 ),
    'mapexpr': ( '{%[1]C}', (0, maxint, ', ') ),

-    # CE - Fixes for tuples
-    'assign2': ( '%|%c, %c = %c, %c\n', 3, 4, 0, 1 ),
-    'assign3': ( '%|%c, %c, %c = %c, %c, %c\n', 5, 6, 7, 0, 1, 2 ),
-
-    #######################
-    # Python 2.3 Additions
-    #######################
-
-    # Import style for 2.0-2.3
-    'importstmt20': ( '%|import %c\n', 1),
-    'importstar20': ( '%|from %[1]{pattr} import *\n', ),
-    'importfrom20': ( '%|from %[1]{pattr} import %c\n', 2 ),
-    'importlist20': ( '%C', (0, maxint, ', ') ),
-
-    #######################
-    # Python 2.5 Additions
-    #######################
-
-    # Import style for 2.5+
    'importstmt': ( '%|import %c\n', 2),
-    'importstar': ( '%|from %[2]{pattr} import *\n', ),
    'importfrom': ( '%|from %[2]{pattr} import %c\n', 3 ),
-    'importmultiple': ( '%|import %c%c\n', 2, 3 ),
-    'import_cont' : ( ', %c', 2 ),
-
-    ########################
-    # Python 3.2 and 3.3 only
-    #######################
-    'store_locals': ( '%|# inspect.currentframe().f_locals = __locals__\n', ),
-
-    ########################
-    # Python 3.4+ Additions
-    #######################
-    'LOAD_CLASSDEREF': ( '%{pattr}', ),
-
-    ########################
-    # PyPy Additions
-    # FIXME: we could remove the corresponding
-    # rules without _pypy if we have pypy
-    #######################
-    'assert_pypy': ( '%|assert %c\n' , 1 ),
-    'assert2_pypy': ( '%|assert %c, %c\n' , 1, 4 ),
-    'trystmt_pypy': ( '%|try:\n%+%c%-%c\n\n', 1, 2 ),
-    'tryfinallystmt_pypy': ( '%|try:\n%+%c%-%|finally:\n%+%c%-\n\n', 1, 3 ),
-    'assign3_pypy': ( '%|%c, %c, %c = %c, %c, %c\n', 5, 4, 3, 0, 1, 2 ),
-    'assign2_pypy': ( '%|%c, %c = %c, %c\n', 3, 2, 0, 1),
+    'importstar': ( '%|from %[2]{pattr} import *\n', ),
}


@@ -493,7 +433,7 @@ class ParserError(python_parser.ParserError):

    def __str__(self):
        lines = ['--- This code section failed: ---']
-        lines.extend([i.format() for i in self.tokens])
+        lines.extend([str(i) for i in self.tokens])
        lines.extend( ['', str(self.error)] )
        return '\n'.join(lines)

@@ -545,8 +485,8 @@ class SourceWalker(GenericASTTraversal, object):
            'indent': '',
            }
        self.version = version
-        self.p = get_python_parser(version, debug_parser=debug_parser,
-                                   compile_mode=compile_mode)
+        self.p = get_python_parser(version, debug_parser=dict(debug_parser),
+                                   compile_mode=compile_mode, is_pypy=is_pypy)
        self.debug_parser = dict(debug_parser)
        self.showast = showast
        self.params = params
@@ -568,9 +508,92 @@ class SourceWalker(GenericASTTraversal, object):
        self.version = version
        self.is_pypy = is_pypy

+        self.customize_for_version(is_pypy, version)
+        return
+
+    @staticmethod
+    def customize_for_version(is_pypy, version):
+        if is_pypy:
+            ########################
+            # PyPy changes
+            #######################
+            TABLE_DIRECT.update({
+                'assert_pypy': ( '%|assert %c\n' , 1 ),
+                'assert2_pypy': ( '%|assert %c, %c\n' , 1, 4 ),
+                'trystmt_pypy': ( '%|try:\n%+%c%-%c\n\n', 1, 2 ),
+                'tryfinallystmt_pypy': ( '%|try:\n%+%c%-%|finally:\n%+%c%-\n\n', 1, 3 ),
+                'assign3_pypy': ( '%|%c, %c, %c = %c, %c, %c\n', 5, 4, 3, 0, 1, 2 ),
+                'assign2_pypy': ( '%|%c, %c = %c, %c\n', 3, 2, 0, 1),
+            })
+        else:
+            ########################
+            # Without PyPy
+            #######################
+            TABLE_DIRECT.update({
+                'assert': ( '%|assert %c\n' , 0 ),
+                'assert2': ( '%|assert %c, %c\n' , 0, 3 ),
+                'trystmt': ( '%|try:\n%+%c%-%c\n\n', 1, 3 ),
+                'tryfinallystmt': ( '%|try:\n%+%c%-%|finally:\n%+%c%-\n\n', 1, 5 ),
+                'assign2': ( '%|%c, %c = %c, %c\n', 3, 4, 0, 1 ),
+                'assign3': ( '%|%c, %c, %c = %c, %c, %c\n', 5, 6, 7, 0, 1, 2 ),
+            })
+        if version < 3.0:
+            TABLE_R.update({
+                'STORE_SLICE+0': ( '%c[:]', 0 ),
+                'STORE_SLICE+1': ( '%c[%p:]', 0, (1, 100) ),
+                'STORE_SLICE+2': ( '%c[:%p]', 0, (1, 100) ),
+                'STORE_SLICE+3': ( '%c[%p:%p]', 0, (1, 100), (2, 100) ),
+                'DELETE_SLICE+0': ( '%|del %c[:]\n', 0 ),
+                'DELETE_SLICE+1': ( '%|del %c[%c:]\n', 0, 1 ),
+                'DELETE_SLICE+2': ( '%|del %c[:%c]\n', 0, 1 ),
+                'DELETE_SLICE+3': ( '%|del %c[%c:%c]\n', 0, 1, 2 ),
+            })

        if 2.0 <= version <= 2.3:
-            TABLE_DIRECT['tryfinallystmt'] = (
-                '%|try:\n%+%c%-%|finally:\n%+%c%-\n\n', 1, 4 )
+            TABLE_DIRECT.update({
+                'tryfinallystmt': ( '%|try:\n%+%c%-%|finally:\n%+%c%-\n\n', 1, 4 )
+            })
+            ###########################
+            # Import style for 2.0-2.3
+            ###########################
+            TABLE_DIRECT.update({
+                'importstmt20': ( '%|import %c\n', 1),
+                'importstar20': ( '%|from %[1]{pattr} import *\n', ),
+                'importfrom20': ( '%|from %[1]{pattr} import %c\n', 2 ),
+                'importlist20': ( '%C', (0, maxint, ', ') ),
+            })
+
+        elif version >= 2.5:
+            ########################
+            # Import style for 2.5+
+            ########################
+            TABLE_DIRECT.update({
+                'importmultiple': ( '%|import %c%c\n', 2, 3 ),
+                'import_cont' : ( ', %c', 2 ),
+            })
+
+        ##########################
+        # Python 3.2 and 3.3 only
+        ##########################
+        if 3.2 <= version <= 3.3:
+            TABLE_DIRECT.update({
+                'store_locals': ( '%|# inspect.currentframe().f_locals = __locals__\n', ),
+            })
+        elif version >= 3.4:
+            ########################
+            # Python 3.4+ Additions
+            #######################
+            TABLE_DIRECT.update({
+                'LOAD_CLASSDEREF': ( '%{pattr}', ),
+            })
+        if version >= 3.6:
+            ########################
+            # Python 3.6+ Additions
+            #######################
+            TABLE_DIRECT.update({
+                'formatted_value': ( '{%c}', 0),
+                'joined_str': ( "f'%c'", 2),
+            })
        return

    f = property(lambda s: s.params['f'],
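The restructuring above moves the version- and PyPy-specific entries out of the module-level tables and into a single customize_for_version() hook that patches TABLE_DIRECT and TABLE_R in place. A stripped-down sketch of that pattern follows; the table and entry names here are made up, only the shape matches the uncompyle6 change.

# Generic illustration of the dispatch-table customization pattern used above.
TEMPLATE_TABLE = {
    'passstmt': ('%|pass\n',),          # entries shared by every version
}

def customize_for_version(is_pypy, version):
    """Patch TEMPLATE_TABLE in place for one interpreter flavor/version."""
    if is_pypy:
        TEMPLATE_TABLE.update({'assert_pypy': ('%|assert %c\n', 1)})
    else:
        TEMPLATE_TABLE.update({'assert': ('%|assert %c\n', 0)})
    if version >= 3.6:
        TEMPLATE_TABLE.update({'formatted_value': ('{%c}', 0)})

customize_for_version(is_pypy=False, version=3.6)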
@@ -724,11 +747,6 @@ class SourceWalker(GenericASTTraversal, object):
        if self.return_none or node != AST('return_stmt', [AST('ret_expr', [NONE]), Token('RETURN_VALUE')]):
            self.write(' ')
            self.preorder(node[0])
-            # 3.5 does jump optimization. The RETURN_END_IF in the return
-            # statement means to dedent. Earlier versions will just have
-            # RETURN_VALUE it is done by a nonterminal in the grammar.
-            if self.version >= 3.5 and node[-1] == 'RETURN_END_IF':
-                self.indentLess()
        self.println()
        self.prune() # stop recursing

@@ -826,6 +844,10 @@ class SourceWalker(GenericASTTraversal, object):
        self.prec += 1
        self.prune()

+    def n_str(self, node):
+        self.write(node[0].pattr)
+        self.prune()
+
    def n_LOAD_CONST(self, node):
        data = node.pattr; datatype = type(data)
        if isinstance(datatype, int) and data == minint:
@@ -1015,7 +1037,7 @@ class SourceWalker(GenericASTTraversal, object):
        self.write(func_name)

        self.indentMore()
-        self.make_function(node, isLambda=False, code=code)
+        self.make_function(node, isLambda=False, codeNode=code)

        if len(self.param_stack) > 1:
            self.write('\n\n')
@@ -1025,7 +1047,7 @@ class SourceWalker(GenericASTTraversal, object):
        self.prune() # stop recursing

    def n_mklambda(self, node):
-        self.make_function(node, isLambda=True, code=node[-2])
+        self.make_function(node, isLambda=True, codeNode=node[-2])
        self.prune() # stop recursing

    def n_list_compr(self, node):
@@ -1187,7 +1209,7 @@ class SourceWalker(GenericASTTraversal, object):
        self.write('{')
        if node[0] in ['LOAD_SETCOMP', 'LOAD_DICTCOMP']:
            self.comprehension_walk3(node, 1, 0)
-        elif node[0].type == 'load_closure':
+        elif node[0].type == 'load_closure' and self.version >= 3.0:
            self.setcomprehension_walk3(node, collection_index=4)
        else:
            self.comprehension_walk(node, iter_index=4)
@@ -1708,8 +1730,7 @@ class SourceWalker(GenericASTTraversal, object):
        %c, %C, and so on.
        """

-        # self.println("-----")
-        # self.print(startnode)
+        # self.println("----> ", startnode.type)

        fmt = entry[0]
        arg = 1
@@ -1828,11 +1849,10 @@ class SourceWalker(GenericASTTraversal, object):
                continue
            op = k[ :k.rfind('_') ]

-            if k == 'CALL_METHOD':
+            if k.startswith('CALL_METHOD'):
                # This happens in PyPy only
                TABLE_R[k] = ('%c(%P)', 0, (1, -1, ', ', 100))
-
-            if op == 'CALL_FUNCTION':
+            elif op == 'CALL_FUNCTION':
                TABLE_R[k] = ('%c(%P)', 0, (1, -1, ', ', 100))
            elif op in ('CALL_FUNCTION_VAR',
                        'CALL_FUNCTION_VAR_KW', 'CALL_FUNCTION_KW'):
@@ -1896,7 +1916,7 @@ class SourceWalker(GenericASTTraversal, object):
                # return self.traverse(node[1])
            raise Exception("Can't find tuple parameter " + name)

-    def make_function(self, node, isLambda, nested=1, code=None):
+    def make_function(self, node, isLambda, nested=1, codeNode=None):
        """Dump function defintion, doc string, and function body."""

        def build_param(ast, name, default):
@@ -1937,12 +1957,12 @@ class SourceWalker(GenericASTTraversal, object):
            pos_args, kw_args, annotate_args = args_node.attr
        else:
            defparams = node[:args_node.attr]
-            kw_args, annotate_args = (0, 0)
+            kw_args = 0
            pass

        if 3.0 <= self.version <= 3.2:
            lambda_index = -2
-        elif 3.03<= self.version:
+        elif 3.03 <= self.version:
            lambda_index = -3
        else:
            lambda_index = None
@@ -1951,7 +1971,7 @@ class SourceWalker(GenericASTTraversal, object):
            assert node[lambda_index].type == 'LOAD_LAMBDA'
            code = node[lambda_index].attr
        else:
-            code = code.attr
+            code = codeNode.attr

        assert iscode(code)
        code = Code(code, self.scanner, self.currentclass)
@@ -14,7 +14,7 @@ def maybe_show_asm(showasm, tokens):
    if showasm:
        stream = showasm if hasattr(showasm, 'write') else sys.stdout
        for t in tokens:
-            stream.write(t.format())
+            stream.write(str(t))
            stream.write('\n')


@@ -30,7 +30,7 @@ def maybe_show_ast(showast, ast):
    """
    if showast:
        stream = showast if hasattr(showast, 'write') else sys.stdout
-        stream.write(repr(ast))
+        stream.write(str(ast))
        stream.write('\n')

@@ -1,6 +1,6 @@
#
# (C) Copyright 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
-# (C) Copyright 2015 by Rocky Bernstein
+# (C) Copyright 2015-2016 by Rocky Bernstein
#
"""
byte-code verification
@@ -132,7 +132,7 @@ class CmpErrorMember(VerifyCmpError):
# these members are ignored
__IGNORE_CODE_MEMBERS__ = ['co_filename', 'co_firstlineno', 'co_lnotab', 'co_stacksize', 'co_names']

-def cmp_code_objects(version, code_obj1, code_obj2, name=''):
+def cmp_code_objects(version, is_pypy, code_obj1, code_obj2, name=''):
    """
    Compare two code-objects.

@@ -193,11 +193,19 @@ def cmp_code_objects(version, code_obj1, code_obj2, name=''):
        import uncompyle6.scanners.scanner26 as scan
        scanner = scan.Scanner26()
    elif version == 2.7:
-        import uncompyle6.scanners.scanner27 as scan
-        scanner = scan.Scanner27()
+        if is_pypy:
+            import uncompyle6.scanners.pypy27 as scan
+            scanner = scan.ScannerPyPy27(show_asm=False)
+        else:
+            import uncompyle6.scanners.scanner27 as scan
+            scanner = scan.Scanner27()
    elif version == 3.2:
-        import uncompyle6.scanners.scanner32 as scan
-        scanner = scan.Scanner32()
+        if is_pypy:
+            import uncompyle6.scanners.pypy32 as scan
+            scanner = scan.ScannerPyPy32()
+        else:
+            import uncompyle6.scanners.scanner32 as scan
+            scanner = scan.Scanner32()
    elif version == 3.3:
        import uncompyle6.scanners.scanner33 as scan
        scanner = scan.Scanner33()
@@ -207,6 +215,9 @@ def cmp_code_objects(version, code_obj1, code_obj2, name=''):
    elif version == 3.5:
        import uncompyle6.scanners.scanner35 as scan
        scanner = scan.Scanner35()
+    elif version == 3.6:
+        import uncompyle6.scanners.scanner36 as scan
+        scanner = scan.Scanner36()

    global JUMP_OPs
    JUMP_OPs = list(scan.JUMP_OPs) + ['JUMP_BACK']
@@ -323,7 +334,7 @@ def cmp_code_objects(version, code_obj1, code_obj2, name=''):
        codes2 = ( c for c in code_obj2.co_consts if hasattr(c, 'co_consts') )

        for c1, c2 in zip(codes1, codes2):
-            cmp_code_objects(version, c1, c2, name=name)
+            cmp_code_objects(version, is_pypy, c1, c2, name=name)
    else:
        # all other members must be equal
        if getattr(code_obj1, member) != getattr(code_obj2, member):
@@ -333,13 +344,8 @@ def cmp_code_objects(version, code_obj1, code_obj2, name=''):

class Token(scanner.Token):
    """Token class with changed semantics for 'cmp()'."""

    def __cmp__(self, o):
        t = self.type # shortcut
-        loads = ('LOAD_NAME', 'LOAD_GLOBAL', 'LOAD_CONST')
-        if t in loads and o.type in loads:
-            if self.pattr == 'None' and o.pattr is None:
-                return 0
        if t == 'BUILD_TUPLE_0' and o.type == 'LOAD_CONST' and o.pattr == ():
            return 0
        if t == 'COME_FROM' == o.type:
@@ -370,14 +376,14 @@ def compare_code_with_srcfile(pyc_filename, src_filename):
               % (PYTHON_MAGIC_INT, magic_int))
        return msg
    code_obj2 = load_file(src_filename)
-    cmp_code_objects(version, code_obj1, code_obj2)
+    cmp_code_objects(version, is_pypy, code_obj1, code_obj2)
    return None

def compare_files(pyc_filename1, pyc_filename2):
    """Compare two .pyc files."""
    version, timestamp, magic_int1, code_obj1, is_pypy = uncompyle6.load_module(pyc_filename1)
    version, timestamp, magic_int2, code_obj2, is_pypy = uncompyle6.load_module(pyc_filename2)
-    cmp_code_objects(version, code_obj1, code_obj2)
+    cmp_code_objects(version, is_pypy, code_obj1, code_obj2)

if __name__ == '__main__':
    t1 = Token('LOAD_CONST', None, 'code_object _expandLang', 52)
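With is_pypy threaded through cmp_code_objects(), the higher-level helpers shown above keep their public signatures. A hedged usage sketch follows; the import path and the file names are assumptions, not taken from the diff.

# Sketch: verify a decompiled-and-recompiled source against its original .pyc.
from uncompyle6 import verify  # assumed import path for this module

# compare_code_with_srcfile returns an error message string, or None on success
# (as shown in the hunks above); the paths here are hypothetical.
msg = verify.compare_code_with_srcfile('example.pyc', 'example_deparsed.py')
if msg:
    print('verification failed:', msg)
else:
    print('bytecode matches')

# Or compare two .pyc files directly:
verify.compare_files('example1.pyc', 'example2.pyc')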
@@ -1,3 +1,3 @@
# This file is suitable for sourcing inside bash as
# well as importing into Python
-VERSION='2.7.1'
+VERSION='2.8.1'