mirror of
https://github.com/rocky/python-uncompyle6.git
synced 2025-08-04 01:09:52 +08:00
Compare commits
31 Commits
release-2.
...
release-2.
Author | SHA1 | Date
---|---|---
 | 2eaea447eb |
 | 287e98b4b1 |
 | 63e4c9343f |
 | eab653afdd |
 | 7700446bb1 |
 | bfd2f77fbc |
 | 1574bf4e1e |
 | 2328ca7a55 |
 | ccdd37611c |
 | 2e355b6245 |
 | 9849f06ff6 |
 | 0e7da031b2 |
 | 25dd67a135 |
 | 1a38d3d9aa |
 | de65a2c250 |
 | 7daec3352c |
 | 8feb472d51 |
 | 7a10917857 |
 | 334f6935b6 |
 | aff920d87b |
 | 6319d33fa0 |
 | abb61a4d7d |
 | a4c943fe0d |
 | 0480455ae1 |
 | 9b7d978944 |
 | a6befdee09 |
 | d6f7ef4e17 |
 | cec80e696c |
 | 0826129112 |
 | 7beaa9f36c |
 | 78ef16e4d7 |
3  .gitignore (vendored)
@@ -1,7 +1,7 @@
|
||||
*.pyo
|
||||
*.pyc
|
||||
*_dis
|
||||
*~
|
||||
*.pyc
|
||||
/.cache
|
||||
/.eggs
|
||||
/.python-version
|
||||
@@ -13,5 +13,6 @@
|
||||
/nose-*.egg
|
||||
/tmp
|
||||
/uncompyle6.egg-info
|
||||
/unpyc
|
||||
__pycache__
|
||||
build
|
||||
|
169  ChangeLog
@@ -1,6 +1,173 @@
|
||||
2016-11-02 rocky <rb@dustyfeet.com>
|
||||
|
||||
* __pkginfo__.py, uncompyle6/version.py: Get ready for release 2.9.4
|
||||
|
||||
2016-11-02 rocky <rb@dustyfeet.com>
|
||||
|
||||
* README.rst: Update unpyc3 info.
|
||||
|
||||
2016-11-01 rocky <rb@dustyfeet.com>
|
||||
|
||||
* pytest/test_grammar.py, uncompyle6/parsers/parse3.py,
|
||||
uncompyle6/parsers/parse31.py, uncompyle6/parsers/parse32.py,
|
||||
uncompyle6/semantics/make_function.py: Clean up annotation grammar a
|
||||
little
|
||||
|
||||
2016-11-01 rocky <rb@dustyfeet.com>
|
||||
|
||||
* test/simple_source/bug31/04_def_annotate.py,
|
||||
uncompyle6/semantics/make_function.py: Full Python 3 annotations
|
||||
|
||||
2016-10-30 rocky <rb@dustyfeet.com>
|
||||
|
||||
* .gitignore, README.rst, test/simple_source/def/03_class_method.py:
|
||||
Note github unpyc3 and.. - Add source to bytecode_2.2/03_class_method.pyc - more ignore
|
||||
|
||||
2016-10-30 rocky <rb@dustyfeet.com>
|
||||
|
||||
* uncompyle6/semantics/make_function.py: More source-code line
|
||||
indention in make_function.. and remove Python 3 situations from make_function2()
|
||||
|
||||
2016-10-29 rocky <rb@dustyfeet.com>
|
||||
|
||||
* uncompyle6/semantics/make_function.py,
|
||||
uncompyle6/semantics/pysource.py: More annotation processing in to
|
||||
make_function Move return-value annotation determination from n_mkfunc_annotate to
|
||||
make_function_annotate which is where other kinds of annotation
|
||||
handling will also need to be done.
|
||||
|
||||
2016-10-29 rocky <rb@dustyfeet.com>
|
||||
|
||||
* uncompyle6/semantics/fragments.py,
|
||||
uncompyle6/semantics/make_function.py,
|
||||
uncompyle6/semantics/parser_error.py,
|
||||
uncompyle6/semantics/pysource.py: Break out make_function() into its
|
||||
own file. It is already too complex and will get worse in Python 3.6. Note: make_function in fragments.py is still inside and probably
|
||||
needs fixup.
|
||||
|
||||
2016-10-28 rocky <rb@dustyfeet.com>
|
||||
|
||||
* pytest/test_grammar.py, uncompyle6/parsers/parse3.py,
|
||||
uncompyle6/parsers/parse31.py, uncompyle6/parsers/parse32.py,
|
||||
uncompyle6/parsers/parse35.py, uncompyle6/semantics/pysource.py:
|
||||
More complete annotate handling Still have a bit of work to do though.
|
||||
|
||||
2016-10-28 rocky <rb@dustyfeet.com>
|
||||
|
||||
* pytest/test_grammar.py, uncompyle6/parsers/parse3.py,
|
||||
uncompyle6/parsers/parse32.py, uncompyle6/parsers/parse33.py,
|
||||
uncompyle6/parsers/parse34.py, uncompyle6/semantics/pysource.py:
|
||||
Expand annotate return to Python 3.4
|
||||
|
||||
2016-10-28 rocky <rb@dustyfeet.com>
|
||||
|
||||
* pytest/test_grammar.py, uncompyle6/parsers/parse31.py,
|
||||
uncompyle6/parsers/parse32.py, uncompyle6/semantics/pysource.py:
|
||||
Expand annotate handling to 3.3 (and possibly 3.2) - DRY Python 3.1-3.3 grammar a little
|
||||
|
||||
2016-10-28 rocky <rb@dustyfeet.com>
|
||||
|
||||
* uncompyle6/parser.py, uncompyle6/parsers/parse3.py,
|
||||
uncompyle6/parsers/parse31.py, uncompyle6/parsers/parse32.py,
|
||||
uncompyle6/parsers/parse33.py, uncompyle6/parsers/parse35.py: Split
|
||||
out 3.1-3.3 parsers from parser3.py This is anticipation of extending annotation to Python 3.2+
|
||||
|
||||
2016-10-27 rocky <rb@dustyfeet.com>
|
||||
|
||||
* test/simple_source/bug31/04_def_annotate.py,
|
||||
test/simple_source/bug31/04_def_attr.py,
|
||||
uncompyle6/parsers/parse31.py, uncompyle6/semantics/pysource.py:
|
||||
Clean and fix Python 3 annotate arg return
|
||||
|
||||
2016-10-26 rocky <rb@dustyfeet.com>
|
||||
|
||||
* __pkginfo__.py: Dependencies stay within 2nd semantic level
|
||||
|
||||
2016-10-26 rocky <rb@dustyfeet.com>
|
||||
|
||||
* ChangeLog, NEWS, uncompyle6/version.py: Get ready for release
|
||||
2.9.3
|
||||
|
||||
2016-10-26 rocky <rb@dustyfeet.com>
|
||||
|
||||
* test/simple_source/bug31/04_def_attr.py,
|
||||
uncompyle6/parsers/parse31.py, uncompyle6/scanner.py,
|
||||
uncompyle6/semantics/pysource.py: Start to attack Python 3.1 def()
|
||||
-> xx construct Start to localize make_function routines by Python version
|
||||
|
||||
2016-10-25 rocky <rb@dustyfeet.com>
|
||||
|
||||
* __pkginfo__.py, uncompyle6/parser.py,
|
||||
uncompyle6/parsers/parse3.py, uncompyle6/parsers/parse31.py: Split
|
||||
out Python 3.1 parser from rest. __pkginfo__.py: use Python 3.1 bytecode fixes
|
||||
|
||||
2016-10-25 rocky <rb@dustyfeet.com>
|
||||
|
||||
* uncompyle6/parsers/parse3.py: Handle Python 3.1 "with ... as"
|
||||
statement
|
||||
|
||||
2016-10-24 rocky <rb@dustyfeet.com>
|
||||
|
||||
* test/Makefile: Add python 3.1 bytecode testing
|
||||
|
||||
2016-10-24 rocky <rb@dustyfeet.com>
|
||||
|
||||
* test/simple_source/stmts/07_withstmt_fn.py,
|
||||
uncompyle6/parsers/parse3.py: Python 3.1 "with" statement bug
|
||||
|
||||
2016-10-24 rocky <rb@dustyfeet.com>
|
||||
|
||||
* uncompyle6/parsers/parse3.py, uncompyle6/parsers/parse34.py,
|
||||
uncompyle6/parsers/parse35.py: Python 3.1 compile bug. DRY Python
|
||||
3.x rules ... via inheritance
|
||||
|
||||
2016-10-24 rocky <rb@dustyfeet.com>
|
||||
|
||||
* uncompyle6/parsers/parse3.py, uncompyle6/scanners/scanner3.py: Fix
|
||||
some Python 3.1 bugs
|
||||
|
||||
2016-10-22 Daniel Bradburn <moagstar@gmail.com>
|
||||
|
||||
* : Merge pull request #60 from rocky/buildstring Buildstring
|
||||
|
||||
2016-10-22 rocky <rb@dustyfeet.com>
|
||||
|
||||
* pytest/test_fstring.py, test/simple_source/bug36/01_fstring.py,
|
||||
uncompyle6/semantics/pysource.py: Move fstring rules inside a 3.6+
|
||||
check
|
||||
|
||||
2016-10-22 rocky <rb@dustyfeet.com>
|
||||
|
||||
* : commit d6f7ef4e178e04d9a612d3a6c0b77a008732357f Author: rocky
|
||||
<rb@dustyfeet.com> Date: Fri Oct 21 07:40:35 2016 -0400
|
||||
|
||||
2016-10-20 moagstar <moagstar@gmail.com>
|
||||
|
||||
* pytest/test_fstring.py, uncompyle6/parsers/parse3.py,
|
||||
uncompyle6/parsers/parse36.py, uncompyle6/semantics/pysource.py:
|
||||
further work on supporting single and multiple fstring decompilation
|
||||
|
||||
2016-10-20 rocky <rb@dustyfeet.com>
|
||||
|
||||
* uncompyle6/main.py, uncompyle6/scanners/scanner2.py,
|
||||
uncompyle6/scanners/scanner26.py: DRY Python 2.x unmangle_classname main.py: small typo: Disassembled -> Decompiled
|
||||
|
||||
2016-10-19 moagstar <moagstar@gmail.com>
|
||||
|
||||
* pytest/test_fstring.py, uncompyle6/parsers/parse3.py,
|
||||
uncompyle6/parsers/parse36.py, uncompyle6/semantics/pysource.py:
|
||||
Further work on fstrings for python 3.6 - there is a new opcode
|
||||
build_string which is used to improve fstring performance, but broke
|
||||
the fstring support in uncompyle
|
||||
|
||||
2016-10-15 rocky <rb@dustyfeet.com>
|
||||
|
||||
* uncompyle6/version.py: Get ready for release 2.9.2
|
||||
* uncompyle6/main.py: Change meta data info in uncompyle6: * Show file size of source when possible, i.e. in Python 3.x * Show full information about python interpreter used to decompile
|
||||
|
||||
2016-10-15 rocky <rb@dustyfeet.com>
|
||||
|
||||
* ChangeLog, NEWS, __pkginfo__.py, requirements.txt,
|
||||
uncompyle6/version.py: Get ready for release 2.9.2
|
||||
|
||||
2016-10-14 rocky <rb@dustyfeet.com>
|
||||
|
||||
|
21  NEWS
@@ -1,3 +1,24 @@
|
||||
uncompyle6 2.9.4 2016-11-02
|
||||
|
||||
- Handle Python 3.x function annotations
|
||||
- track def keyword-parameter line-splitting in source code better
|
||||
- bump min xdis version to mask previous xdis bug
|
||||
|
||||
uncompyle6 2.9.3 2016-10-26
|
||||
|
||||
Release forced by incompatibility change in xdis 3.2.0.
|
||||
|
||||
- Python 3.1 bugs:
|
||||
* handle "with ... as"
|
||||
* handle "with"
|
||||
* Start handling def (...) -> yy (has bugs still)
|
||||
|
||||
- DRY Python 3.x via inheritance
|
||||
- Python 3.6 work (from Daniel Bradburn)
|
||||
* Handle 3.6 buildstring
|
||||
* Handle 3.6 single and multiple fstrings better
|
||||
|
||||
|
||||
uncompyle6 2.9.2 2016-10-15
|
||||
|
||||
- use source-code line breaks to assist in where to break
|
||||
|
@@ -20,9 +20,9 @@ Why this?
|
||||
There were a number of decompyle, uncompile, uncompyle2, uncompyle3
|
||||
forks around. All of them came basically from the same code base, and
|
||||
almost all of them no were no longer actively maintained. Only one
|
||||
handled Python 3, and even there, only 3.2. This code pulls these
|
||||
together and moves forward. It also addresses a number of open issues
|
||||
in the previous forks.
|
||||
handled Python 3, and even there, only 3.2 or 3.3 depending on which
|
||||
code is used. This code pulls these together and moves forward. It
|
||||
also addresses a number of open issues in the previous forks.
|
||||
|
||||
What makes this different from other CPython bytecode decompilers?: its
|
||||
ability to deparse just fragments and give source-code information
|
||||
@@ -132,6 +132,7 @@ See Also
|
||||
|
||||
* https://github.com/zrax/pycdc : supports all versions of Python and is written in C++
|
||||
* https://code.google.com/archive/p/unpyc3/ : supports Python 3.2 only. The above projects use a different decompiling technique than is used here.
|
||||
* https://github.com/figment/unpyc3/ : fork of the above, but supports Python 3.3 only. Includes some fixes, like supporting function annotations
|
||||
* The HISTORY_ file.
|
||||
|
||||
.. |downloads| image:: https://img.shields.io/pypi/dd/uncompyle6.svg
|
||||
|
@@ -37,8 +37,8 @@ entry_points={
|
||||
'pydisassemble=uncompyle6.bin.pydisassemble:main',
|
||||
]}
|
||||
ftp_url = None
|
||||
install_requires = ['spark-parser >= 1.4.0',
|
||||
'xdis >= 3.1.0']
|
||||
install_requires = ['spark-parser >= 1.4.0, < 1.5.0',
|
||||
'xdis >= 3.2.2, < 3.3.0']
|
||||
license = 'MIT'
|
||||
mailing_list = 'python-debugger@googlegroups.com'
|
||||
modname = 'uncompyle6'
|
||||
|
@@ -21,9 +21,13 @@ def expressions(draw):
|
||||
'container',
|
||||
'self.attribute',
|
||||
'self.method()',
|
||||
'sorted(items, key=lambda x: x.name)',
|
||||
'func(*args, **kwargs)',
|
||||
'text or default',
|
||||
# These expressions are failing, I think these are control
|
||||
# flow problems rather than problems with FORMAT_VALUE,
|
||||
# however I need to confirm this...
|
||||
#'sorted(items, key=lambda x: x.name)',
|
||||
#'func(*args, **kwargs)',
|
||||
#'text or default',
|
||||
#'43 if life_the_universe and everything else None'
|
||||
)))
|
||||
|
||||
|
||||
@@ -119,6 +123,8 @@ def test_format_specifiers(format_specifier):
|
||||
|
||||
|
||||
def run_test(text):
|
||||
hypothesis.assume(len(text))
|
||||
hypothesis.assume("f'{" in text)
|
||||
expr = text + '\n'
|
||||
code = compile(expr, '<string>', 'single')
|
||||
deparsed = deparse_code(PYTHON_VERSION, code, compile_mode='single')
|
||||
@@ -136,8 +142,8 @@ def test_uncompyle_fstring(fstring):
|
||||
|
||||
@pytest.mark.skipif(PYTHON_VERSION < 3.6, reason='need at least python 3.6')
|
||||
@pytest.mark.parametrize('fstring', [
|
||||
#"f'{abc}{abc!s}'",
|
||||
"f'{abc!s}'",
|
||||
"f'{abc}{abc!s}'",
|
||||
"f'{abc}0'",
|
||||
])
|
||||
def test_uncompyle_direct(fstring):
|
||||
"""useful for debugging"""
|
||||
|
@@ -1,4 +1,4 @@
|
||||
import pytest, re
|
||||
import re
|
||||
from uncompyle6 import PYTHON_VERSION, PYTHON3, IS_PYPY # , PYTHON_VERSION
|
||||
from uncompyle6.parser import get_python_parser
|
||||
from uncompyle6.scanner import get_scanner
|
||||
@@ -16,14 +16,21 @@ def test_grammar():
|
||||
p = get_python_parser(PYTHON_VERSION, is_pypy=IS_PYPY)
|
||||
lhs, rhs, tokens, right_recursive = p.checkSets()
|
||||
expect_lhs = set(['expr1024', 'pos_arg'])
|
||||
unused_rhs = set(['build_list', 'call_function', 'mkfunc', 'mklambda',
|
||||
unused_rhs = set(['build_list', 'call_function', 'mkfunc',
|
||||
'mklambda',
|
||||
'unpack', 'unpack_list'])
|
||||
expect_right_recursive = [['designList', ('designator', 'DUP_TOP', 'designList')]]
|
||||
if PYTHON3:
|
||||
expect_lhs.add('load_genexpr')
|
||||
|
||||
unused_rhs = unused_rhs.union(set("""
|
||||
except_pop_except genexpr classdefdeco2 listcomp
|
||||
""".split()))
|
||||
if 3.0 <= PYTHON_VERSION:
|
||||
expect_lhs.add("annotate_arg")
|
||||
expect_lhs.add("annotate_tuple")
|
||||
unused_rhs.add("mkfunc_annotate")
|
||||
pass
|
||||
else:
|
||||
expect_lhs.add('kwarg')
|
||||
assert expect_lhs == set(lhs)
|
||||
@@ -43,5 +50,6 @@ def test_grammar():
|
||||
check_tokens(tokens, opcode_set)
|
||||
elif PYTHON_VERSION == 3.4:
|
||||
ignore_set.add('LOAD_CLASSNAME')
|
||||
ignore_set.add('STORE_LOCALS')
|
||||
opcode_set = set(s.opc.opname).union(ignore_set)
|
||||
check_tokens(tokens, opcode_set)
|
||||
|
@@ -62,7 +62,7 @@ check-bytecode-2:
|
||||
|
||||
#: Check deparsing bytecode 3.x only
|
||||
check-bytecode-3:
|
||||
$(PYTHON) test_pythonlib.py --bytecode-3.2 --bytecode-3.3 \
|
||||
$(PYTHON) test_pythonlib.py --bytecode-3.1 --bytecode-3.2 --bytecode-3.3 \
|
||||
--bytecode-3.4 --bytecode-3.5 --bytecode-pypy3.2
|
||||
|
||||
#: Check deparsing bytecode that works running Python 2 and Python 3
|
||||
|
BIN  test/bytecode_3.1/04_def_annotate.pyc (new file, binary not shown)
BIN  test/bytecode_3.1/04_def_attr.pyc-notyet (new file, binary not shown)
BIN  test/bytecode_3.1/04_withas.pyc (new file, binary not shown)
BIN  test/bytecode_3.1/07_withstmt_fn.pyc (new file, binary not shown)
BIN  test/bytecode_3.4/04_def_annotate.pyc (new file, binary not shown)
BIN  (two further binary files changed; names not captured in this view)
10  test/simple_source/bug31/04_def_annotate.py (new file)
@@ -0,0 +1,10 @@
|
||||
# Python 3 annotations
|
||||
|
||||
def foo(a, b: 'annotating b', c: int) -> float:
|
||||
print(a + b + c)
|
||||
|
||||
# Python 3.1 _pyio.py uses the -> "IOBase" annotation
|
||||
def open(file, mode = "r", buffering = None,
|
||||
encoding = None, errors = None,
|
||||
newline = None, closefd = True) -> "IOBase":
|
||||
return text
|
@@ -1,3 +1,5 @@
|
||||
var1 = 'x'
|
||||
var2 = 'y'
|
||||
print(f'interpolate {var1} strings {var2!r} {var2!s} py36')
|
||||
print(f'{abc}0')
|
||||
print(f'{abc}{abc!s}')
|
||||
|
13  test/simple_source/def/03_class_method.py (new file)
@@ -0,0 +1,13 @@
|
||||
# From Decompyle++
|
||||
# File: 22_class_method.pyc (Python 2.2)
|
||||
# An old-style Python class.
|
||||
|
||||
class MyClass:
|
||||
|
||||
def method(self, i):
|
||||
if i is 5:
|
||||
print 'five'
|
||||
elif not (i is 2):
|
||||
print 'not two'
|
||||
else:
|
||||
print '2'
|
@@ -1,4 +1,4 @@
|
||||
# Python 2.6 has a truly weird way of handling with here.
|
||||
# Python 2.6 has a truly weird way of handling "with" here.
|
||||
# added rule for 2.6
|
||||
# setupwith ::= DUP_TOP LOAD_ATTR ROT_TWO LOAD_ATTR CALL_FUNCTION_0 POP_TOP
|
||||
|
||||
|
@@ -1,18 +1,19 @@
|
||||
from __future__ import print_function
|
||||
import datetime, os, sys
|
||||
|
||||
from uncompyle6 import verify, PYTHON_VERSION, IS_PYPY
|
||||
from uncompyle6 import verify, IS_PYPY
|
||||
from xdis.code import iscode
|
||||
from uncompyle6.disas import check_object_path
|
||||
from uncompyle6.semantics import pysource
|
||||
from uncompyle6.parser import ParserError
|
||||
from uncompyle6.version import VERSION
|
||||
|
||||
from xdis.load import load_module
|
||||
|
||||
def uncompyle(
|
||||
version, co, out=None, showasm=False, showast=False,
|
||||
bytecode_version, co, out=None, showasm=False, showast=False,
|
||||
timestamp=None, showgrammar=False, code_objects={},
|
||||
is_pypy=False, magic_int=None):
|
||||
source_size=None, is_pypy=False, magic_int=None):
|
||||
"""
|
||||
ingests and deparses a given code block 'co'
|
||||
"""
|
||||
@@ -22,21 +23,26 @@ def uncompyle(
|
||||
real_out = out or sys.stdout
|
||||
co_pypy_str = 'PyPy ' if is_pypy else ''
|
||||
run_pypy_str = 'PyPy ' if IS_PYPY else ''
|
||||
print('# %sPython bytecode %s%s disassembled from %sPython %s' %
|
||||
(co_pypy_str, version,
|
||||
print('# uncompyle6 version %s\n'
|
||||
'# %sPython bytecode %s%s\n# Decompiled from: %sPython %s' %
|
||||
(VERSION, co_pypy_str, bytecode_version,
|
||||
" (%d)" % magic_int if magic_int else "",
|
||||
run_pypy_str, PYTHON_VERSION),
|
||||
file=real_out)
|
||||
run_pypy_str, '\n# '.join(sys.version.split('\n'))),
|
||||
file=real_out)
|
||||
if co.co_filename:
|
||||
print('# Embedded file name: %s' % co.co_filename,
|
||||
file=real_out)
|
||||
if timestamp:
|
||||
print('# Compiled at: %s' % datetime.datetime.fromtimestamp(timestamp),
|
||||
file=real_out)
|
||||
if source_size:
|
||||
print('# Size of source mod 2**32: %d bytes' % source_size,
|
||||
file=real_out)
|
||||
|
||||
try:
|
||||
pysource.deparse_code(version, co, out, showasm, showast, showgrammar,
|
||||
code_objects=code_objects, is_pypy=is_pypy)
|
||||
pysource.deparse_code(bytecode_version, co, out, showasm, showast,
|
||||
showgrammar, code_objects=code_objects,
|
||||
is_pypy=is_pypy)
|
||||
except pysource.SourceWalkerError as e:
|
||||
# deparsing failed
|
||||
print("\n")
|
||||
@@ -66,7 +72,8 @@ def uncompyle_file(filename, outstream=None, showasm=False, showast=False,
|
||||
is_pypy=is_pypy, magic_int=magic_int)
|
||||
else:
|
||||
uncompyle(version, co, outstream, showasm, showast,
|
||||
timestamp, showgrammar, code_objects=code_objects,
|
||||
timestamp, showgrammar,
|
||||
code_objects=code_objects, source_size=source_size,
|
||||
is_pypy=is_pypy, magic_int=magic_int)
|
||||
co = None
|
||||
|
||||
|
@@ -42,13 +42,22 @@ class PythonParser(GenericASTBuilder):
|
||||
return
|
||||
|
||||
def add_unique_rules(self, rules, customize):
|
||||
"""Add rules to grammar
|
||||
"""Add rules (a list of string) to grammar
|
||||
"""
|
||||
for rule in rules:
|
||||
if len(rule) == 0:
|
||||
continue
|
||||
opname = rule.split('::=')[0].strip()
|
||||
self.add_unique_rule(rule, opname, 0, customize)
|
||||
return
|
||||
|
||||
def add_unique_doc_rules(self, rules_str, customize):
|
||||
"""Add rules (a docstring-like list of rules) to grammar
|
||||
"""
|
||||
rules = [r.strip() for r in rules_str.split("\n")]
|
||||
self.add_unique_rules(rules, customize)
|
||||
return
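A minimal, self-contained sketch of what the new add_unique_doc_rules() helper does with its input; the rule text is borrowed from the 3.6 parser later in this changeset, and the print stands in for the real add_unique_rule call:

# Illustrative sketch, not part of the diff.
rules_str = """
expr ::= fstring_single
fstring_single ::= expr FORMAT_VALUE
"""
rules = [r.strip() for r in rules_str.split("\n")]
for rule in rules:
    if len(rule) == 0:
        continue
    lhs = rule.split('::=')[0].strip()
    print("adding rule for %-16s %s" % (lhs, rule))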
|
||||
|
||||
def cleanup(self):
|
||||
"""
|
||||
Remove recursive references to allow garbage
|
||||
@@ -613,20 +622,23 @@ def get_python_parser(
|
||||
else:
|
||||
import uncompyle6.parsers.parse3 as parse3
|
||||
if version == 3.1:
|
||||
import uncompyle6.parsers.parse31 as parse31
|
||||
if compile_mode == 'exec':
|
||||
p = parse3.Python31Parser(debug_parser)
|
||||
p = parse31.Python31Parser(debug_parser)
|
||||
else:
|
||||
p = parse3.Python31ParserSingle(debug_parser)
|
||||
p = parse31.Python31ParserSingle(debug_parser)
|
||||
elif version == 3.2:
|
||||
import uncompyle6.parsers.parse32 as parse32
|
||||
if compile_mode == 'exec':
|
||||
p = parse3.Python32Parser(debug_parser)
|
||||
p = parse32.Python32Parser(debug_parser)
|
||||
else:
|
||||
p = parse3.Python32ParserSingle(debug_parser)
|
||||
p = parse32.Python32ParserSingle(debug_parser)
|
||||
elif version == 3.3:
|
||||
import uncompyle6.parsers.parse33 as parse33
|
||||
if compile_mode == 'exec':
|
||||
p = parse3.Python33Parser(debug_parser)
|
||||
p = parse33.Python33Parser(debug_parser)
|
||||
else:
|
||||
p = parse3.Python33ParserSingle(debug_parser)
|
||||
p = parse33.Python33ParserSingle(debug_parser)
|
||||
elif version == 3.4:
|
||||
import uncompyle6.parsers.parse34 as parse34
|
||||
if compile_mode == 'exec':
|
||||
|
@@ -246,6 +246,25 @@ class Python3Parser(PythonParser):
|
||||
c_stmts_opt34 ::= JUMP_BACK JUMP_ABSOLUTE c_stmts_opt
|
||||
"""
|
||||
|
||||
|
||||
def p_def_annotations3(self, args):
|
||||
"""
|
||||
# Annotated functions
|
||||
stmt ::= funcdef_annotate
|
||||
funcdef_annotate ::= mkfunc_annotate designator
|
||||
|
||||
# This has the annotation value.
|
||||
# LOAD_NAME is used in an annotation type like
|
||||
# int, float, str
|
||||
annotate_arg ::= LOAD_NAME
|
||||
# LOAD_CONST is used in an annotation string
|
||||
annotate_arg ::= LOAD_CONST
|
||||
|
||||
# This stores the tuple of parameter names
|
||||
# that have been annotated
|
||||
annotate_tuple ::= LOAD_CONST
|
||||
"""
|
||||
|
||||
def p_come_from3(self, args):
|
||||
"""
|
||||
opt_come_from_except ::= COME_FROM_EXCEPT
|
||||
@@ -360,10 +379,9 @@ class Python3Parser(PythonParser):
|
||||
# Python 3.4+
|
||||
expr ::= LOAD_CLASSDEREF
|
||||
|
||||
binary_subscr2 ::= expr expr DUP_TOP_TWO BINARY_SUBSCR
|
||||
# Python3 drops slice0..slice3
|
||||
|
||||
# In Python 2, DUP_TOP_TWO is DUP_TOPX_2
|
||||
binary_subscr2 ::= expr expr DUP_TOP_TWO BINARY_SUBSCR
|
||||
'''
|
||||
|
||||
@staticmethod
|
||||
@@ -500,7 +518,6 @@ class Python3Parser(PythonParser):
|
||||
load_attr ::= expr LOOKUP_METHOD
|
||||
call_function ::= expr CALL_METHOD
|
||||
"""
|
||||
saw_format_value = False
|
||||
for i, token in enumerate(tokens):
|
||||
opname = token.type
|
||||
opname_base = opname[:opname.rfind('_')]
|
||||
@@ -513,13 +530,6 @@ class Python3Parser(PythonParser):
|
||||
assign2_pypy ::= expr expr designator designator
|
||||
""", nop_func)
|
||||
continue
|
||||
elif opname == 'FORMAT_VALUE':
|
||||
# Python 3.6+
|
||||
self.addRule("""
|
||||
expr ::= fstring_expr
|
||||
fstring_expr ::= expr FORMAT_VALUE
|
||||
""", nop_func)
|
||||
|
||||
elif opname in ('CALL_FUNCTION', 'CALL_FUNCTION_VAR',
|
||||
'CALL_FUNCTION_VAR_KW', 'CALL_FUNCTION_KW'):
|
||||
self.custom_classfunc_rule(opname, token, customize)
|
||||
@@ -542,14 +552,6 @@ class Python3Parser(PythonParser):
|
||||
if opname_base == 'BUILD_TUPLE':
|
||||
rule = ('load_closure ::= %s%s' % (('LOAD_CLOSURE ' * v), opname))
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
if opname_base == 'BUILD_LIST' and saw_format_value:
|
||||
format_or_str_n = "formatted_value_or_str_%s" % v
|
||||
self.addRule("""
|
||||
expr ::= joined_str
|
||||
joined_str ::= LOAD_CONST LOAD_ATTR %s CALL_FUNCTION_1
|
||||
%s ::= %s%s
|
||||
""" % (format_or_str_n, format_or_str_n, ("formatted_value_or_str " *v), opname),
|
||||
nop_func)
|
||||
|
||||
elif opname == 'LOOKUP_METHOD':
|
||||
# A PyPy speciality - DRY with parse2
|
||||
@@ -593,7 +595,7 @@ class Python3Parser(PythonParser):
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
rule = "mapexpr ::= %s %s" % (opname, kvlist_n)
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
elif opname_base in ('UNPACK_EX'):
|
||||
elif opname_base in ('UNPACK_EX',):
|
||||
before_count, after_count = token.attr
|
||||
rule = 'unpack ::= ' + opname + ' designator' * (before_count + after_count + 1)
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
@@ -687,54 +689,11 @@ class Python3Parser(PythonParser):
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
return
|
||||
|
||||
|
||||
class Python31Parser(Python3Parser):
|
||||
|
||||
def p_31(self, args):
|
||||
"""
|
||||
# Store locals is only in Python 3.0 to 3.3
|
||||
stmt ::= store_locals
|
||||
store_locals ::= LOAD_FAST STORE_LOCALS
|
||||
"""
|
||||
|
||||
class Python32Parser(Python3Parser):
|
||||
|
||||
def p_32(self, args):
|
||||
"""
|
||||
# Store locals is only in Python 3.0 to 3.3
|
||||
stmt ::= store_locals
|
||||
store_locals ::= LOAD_FAST STORE_LOCALS
|
||||
"""
|
||||
|
||||
class Python33Parser(Python3Parser):
|
||||
def p_33(self, args):
|
||||
"""
|
||||
# Store locals is only in Python 3.0 to 3.3
|
||||
stmt ::= store_locals
|
||||
store_locals ::= LOAD_FAST STORE_LOCALS
|
||||
|
||||
# Python 3.3 adds yield from.
|
||||
expr ::= yield_from
|
||||
yield_from ::= expr expr YIELD_FROM
|
||||
"""
|
||||
|
||||
class Python3ParserSingle(Python3Parser, PythonParserSingle):
|
||||
pass
|
||||
|
||||
|
||||
class Python31ParserSingle(Python31Parser, PythonParserSingle):
|
||||
pass
|
||||
|
||||
class Python32ParserSingle(Python32Parser, PythonParserSingle):
|
||||
pass
|
||||
|
||||
|
||||
class Python33ParserSingle(Python33Parser, PythonParserSingle):
|
||||
pass
|
||||
|
||||
def info(args):
|
||||
# Check grammar
|
||||
# Should also add a way to dump grammar
|
||||
p = Python3Parser()
|
||||
if len(args) > 0:
|
||||
arg = args[0]
|
||||
@@ -742,11 +701,15 @@ def info(args):
|
||||
from uncompyle6.parser.parse35 import Python35Parser
|
||||
p = Python35Parser()
|
||||
elif arg == '3.3':
|
||||
from uncompyle6.parser.parse33 import Python33Parser
|
||||
p = Python33Parser()
|
||||
elif arg == '3.2':
|
||||
from uncompyle6.parser.parse32 import Python32Parser
|
||||
p = Python32Parser()
|
||||
p.checkGrammar()
|
||||
|
||||
if len(sys.argv) > 1 and sys.argv[1] == 'dump':
|
||||
print('-' * 50)
|
||||
p.dumpGrammar()
|
||||
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
|
51  uncompyle6/parsers/parse31.py (new file)
@@ -0,0 +1,51 @@
|
||||
# Copyright (c) 2016 Rocky Bernstein
|
||||
"""
|
||||
spark grammar differences over Python 3.2 for Python 3.1.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
from uncompyle6.parser import PythonParserSingle
|
||||
from uncompyle6.parsers.parse32 import Python32Parser
|
||||
|
||||
class Python31Parser(Python32Parser):
|
||||
|
||||
def p_31(self, args):
|
||||
"""
|
||||
binary_subscr2 ::= expr expr DUP_TOPX BINARY_SUBSCR
|
||||
|
||||
setupwith ::= DUP_TOP LOAD_ATTR store LOAD_ATTR CALL_FUNCTION_0 POP_TOP
|
||||
setupwithas ::= DUP_TOP LOAD_ATTR store LOAD_ATTR CALL_FUNCTION_0 store
|
||||
withstmt ::= expr setupwith SETUP_FINALLY
|
||||
suite_stmts_opt
|
||||
POP_BLOCK LOAD_CONST COME_FROM_FINALLY
|
||||
load del_stmt WITH_CLEANUP END_FINALLY
|
||||
|
||||
# Keeps the Python 3.1 withas designator in the same position as it is in other versions
|
||||
setupwithas31 ::= setupwithas SETUP_FINALLY load del_stmt
|
||||
|
||||
withasstmt ::= expr setupwithas31 designator
|
||||
suite_stmts_opt
|
||||
POP_BLOCK LOAD_CONST COME_FROM_FINALLY
|
||||
load del_stmt WITH_CLEANUP END_FINALLY
|
||||
|
||||
store ::= STORE_FAST
|
||||
store ::= STORE_NAME
|
||||
load ::= LOAD_FAST
|
||||
load ::= LOAD_NAME
|
||||
"""
|
||||
|
||||
def add_custom_rules(self, tokens, customize):
|
||||
super(Python31Parser, self).add_custom_rules(tokens, customize)
|
||||
for i, token in enumerate(tokens):
|
||||
opname = token.type
|
||||
if opname.startswith('MAKE_FUNCTION_A'):
|
||||
args_pos, args_kw, annotate_args = token.attr
|
||||
# Check that there are 2 annotated params?
|
||||
# rule = ('mkfunc2 ::= %s%sEXTENDED_ARG %s' %
|
||||
# ('pos_arg ' * (args_pos), 'kwargs ' * (annotate_args-1), opname))
|
||||
rule = ('mkfunc_annotate ::= %s%sannotate_tuple LOAD_CONST EXTENDED_ARG %s' %
|
||||
(('pos_arg ' * (args_pos)),
|
||||
('annotate_arg ' * (annotate_args-1)), opname))
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
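A sketch of the rule string this loop builds; the opcode name and token.attr values below are invented for illustration, while the format string is the one used above:

opname = 'MAKE_FUNCTION_A_3_1'              # hypothetical decorated opcode name
args_pos, args_kw, annotate_args = 1, 0, 3  # hypothetical token.attr
rule = ('mkfunc_annotate ::= %s%sannotate_tuple LOAD_CONST EXTENDED_ARG %s' %
        (('pos_arg ' * (args_pos)),
         ('annotate_arg ' * (annotate_args-1)), opname))
print(rule)
# mkfunc_annotate ::= pos_arg annotate_arg annotate_arg annotate_tuple LOAD_CONST EXTENDED_ARG MAKE_FUNCTION_A_3_1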
|
||||
class Python31ParserSingle(Python31Parser, PythonParserSingle):
|
||||
pass
|
35  uncompyle6/parsers/parse32.py (new file)
@@ -0,0 +1,35 @@
|
||||
# Copyright (c) 2016 Rocky Bernstein
|
||||
"""
|
||||
spark grammar differences over Python 3 for Python 3.2.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
from uncompyle6.parser import PythonParserSingle
|
||||
from uncompyle6.parsers.parse3 import Python3Parser
|
||||
|
||||
class Python32Parser(Python3Parser):
|
||||
def p_32on(self, args):
|
||||
"""
|
||||
# In Python 3.2+, DUP_TOPX is DUP_TOP_TWO
|
||||
binary_subscr2 ::= expr expr DUP_TOP_TWO BINARY_SUBSCR
|
||||
stmt ::= store_locals
|
||||
store_locals ::= LOAD_FAST STORE_LOCALS
|
||||
"""
|
||||
pass
|
||||
|
||||
def add_custom_rules(self, tokens, customize):
|
||||
super(Python32Parser, self).add_custom_rules(tokens, customize)
|
||||
for i, token in enumerate(tokens):
|
||||
opname = token.type
|
||||
if opname.startswith('MAKE_FUNCTION_A'):
|
||||
args_pos, args_kw, annotate_args = token.attr
|
||||
# Check that there are 2 annotated params?
|
||||
rule = (('mkfunc_annotate ::= %s%sannotate_tuple '
|
||||
'LOAD_CONST LOAD_CONST EXTENDED_ARG %s') %
|
||||
(('pos_arg ' * (args_pos)),
|
||||
('annotate_arg ' * (annotate_args-1)), opname))
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
|
||||
|
||||
class Python32ParserSingle(Python32Parser, PythonParserSingle):
|
||||
pass
|
20  uncompyle6/parsers/parse33.py (new file)
@@ -0,0 +1,20 @@
|
||||
# Copyright (c) 2016 Rocky Bernstein
|
||||
"""
|
||||
spark grammar differences over Python 3.2 for Python 3.3.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
from uncompyle6.parser import PythonParserSingle
|
||||
from uncompyle6.parsers.parse32 import Python32Parser
|
||||
|
||||
class Python33Parser(Python32Parser):
|
||||
|
||||
def p_33on(self, args):
|
||||
"""
|
||||
# Python 3.3+ adds yield from.
|
||||
expr ::= yield_from
|
||||
yield_from ::= expr expr YIELD_FROM
|
||||
"""
|
||||
|
||||
class Python33ParserSingle(Python33Parser, PythonParserSingle):
|
||||
pass
|
@@ -1,13 +1,13 @@
|
||||
# Copyright (c) 2016 Rocky Bernstein
|
||||
"""
|
||||
spark grammar differences over Python3 for Python 3.4.2.
|
||||
spark grammar differences over Python 3.3 for Python 3.4
|
||||
"""
|
||||
|
||||
from uncompyle6.parser import PythonParserSingle
|
||||
from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
|
||||
from uncompyle6.parsers.parse3 import Python3Parser
|
||||
from uncompyle6.parsers.parse33 import Python33Parser
|
||||
|
||||
class Python34Parser(Python3Parser):
|
||||
class Python34Parser(Python33Parser):
|
||||
|
||||
def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
|
||||
super(Python34Parser, self).__init__(debug_parser)
|
||||
@@ -28,12 +28,6 @@ class Python34Parser(Python3Parser):
|
||||
iflaststmt ::= testexpr c_stmts_opt34
|
||||
c_stmts_opt34 ::= JUMP_BACK JUMP_ABSOLUTE c_stmts_opt
|
||||
|
||||
# Python 3.3 added "yield from." Do it the same way as in
|
||||
# 3.3
|
||||
|
||||
expr ::= yield_from
|
||||
yield_from ::= expr expr YIELD_FROM
|
||||
|
||||
# Is this 3.4 only?
|
||||
yield_from ::= expr GET_ITER LOAD_CONST YIELD_FROM
|
||||
|
||||
|
@@ -1,14 +1,14 @@
|
||||
# Copyright (c) 2016 Rocky Bernstein
|
||||
"""
|
||||
spark grammar differences over Python3 for Python 3.5.
|
||||
spark grammar differences over Python 3.4 for Python 3.5.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
from uncompyle6.parser import PythonParserSingle
|
||||
from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
|
||||
from uncompyle6.parsers.parse3 import Python3Parser
|
||||
from uncompyle6.parsers.parse34 import Python34Parser
|
||||
|
||||
class Python35Parser(Python3Parser):
|
||||
class Python35Parser(Python34Parser):
|
||||
|
||||
def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
|
||||
super(Python35Parser, self).__init__(debug_parser)
|
||||
|
@@ -16,16 +16,37 @@ class Python36Parser(Python35Parser):
|
||||
|
||||
def p_36misc(self, args):
|
||||
"""
|
||||
formatted_value ::= LOAD_FAST FORMAT_VALUE
|
||||
str ::= LOAD_CONST
|
||||
joined_str ::= LOAD_CONST LOAD_ATTR format_value_or_strs
|
||||
BUILD_LIST CALL_FUNCTION
|
||||
format_value_or_strs ::= format_value_or_strs format_value_or_str
|
||||
format_value_or_strs ::= format_value_or_str
|
||||
format_value_or_str ::= format_value
|
||||
format_value_or_str ::= str
|
||||
fstring_multi ::= fstring_expr_or_strs BUILD_STRING
|
||||
fstring_expr_or_strs ::= fstring_expr_or_strs fstring_expr_or_str
|
||||
fstring_expr_or_strs ::= fstring_expr_or_str
|
||||
"""
|
||||
|
||||
def add_custom_rules(self, tokens, customize):
|
||||
super(Python36Parser, self).add_custom_rules(tokens, customize)
|
||||
for i, token in enumerate(tokens):
|
||||
opname = token.type
|
||||
if opname == 'FORMAT_VALUE':
|
||||
rules_str = """
|
||||
expr ::= fstring_single
|
||||
fstring_single ::= expr FORMAT_VALUE
|
||||
"""
|
||||
self.add_unique_doc_rules(rules_str, customize)
|
||||
elif opname == 'BUILD_STRING':
|
||||
v = token.attr
|
||||
fstring_expr_or_str_n = "fstring_expr_or_str_%s" % v
|
||||
rules_str = """
|
||||
expr ::= fstring_expr
|
||||
fstring_expr ::= expr FORMAT_VALUE
|
||||
str ::= LOAD_CONST
|
||||
fstring_expr_or_str ::= fstring_expr
|
||||
fstring_expr_or_str ::= str
|
||||
|
||||
expr ::= fstring_multi
|
||||
fstring_multi ::= %s BUILD_STRING
|
||||
%s ::= %sBUILD_STRING
|
||||
""" % (fstring_expr_or_str_n, fstring_expr_or_str_n, "fstring_expr_or_str " * v)
|
||||
self.add_unique_doc_rules(rules_str, customize)
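The bytecode this branch targets can be seen directly (CPython 3.6+ only; illustrative): each interpolation compiles to FORMAT_VALUE, and the pieces are then concatenated with BUILD_STRING, the opcode that triggers the custom rule above.

import dis
dis.dis(compile("f'{abc}{abc!s}'", '<example>', 'single'))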
|
||||
|
||||
class Python36ParserSingle(Python36Parser, PythonParserSingle):
|
||||
pass
|
||||
|
||||
|
@@ -20,7 +20,7 @@ from uncompyle6.scanners.tok import Token
|
||||
# The byte code versions we support
|
||||
PYTHON_VERSIONS = (1.5,
|
||||
2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 2.7,
|
||||
3.1, 3.2, 3.3, 3.4, 3.5, 3.6)
|
||||
3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6)
|
||||
|
||||
# FIXME: DRY
|
||||
if PYTHON3:
|
||||
@@ -219,6 +219,16 @@ class Scanner(object):
|
||||
yield start
|
||||
start += self.op_size(self.code[start])
|
||||
|
||||
def op_size(self, op):
|
||||
"""
|
||||
Return size of operator with its arguments
|
||||
for given opcode <op>.
|
||||
"""
|
||||
if op < self.opc.HAVE_ARGUMENT:
|
||||
return 1
|
||||
else:
|
||||
return 3
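A standalone sketch of the walk op_size() supports: stepping through pre-3.6 bytecode one instruction at a time, 1 byte for argument-less opcodes and 3 bytes otherwise. It uses the running interpreter's opcode table and is only meaningful for bytecode older than 3.6 wordcode:

import opcode

def offsets(code_bytes):
    # Yield instruction start offsets for a pre-3.6 bytecode string.
    i = 0
    while i < len(code_bytes):
        yield i
        op = code_bytes[i]
        i += 1 if op < opcode.HAVE_ARGUMENT else 3

# Example (offsets are only correct when run on Python < 3.6):
print(list(offsets(compile("x = 1", "<example>", "exec").co_code)))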
|
||||
|
||||
def remove_mid_line_ifs(self, ifs):
|
||||
"""
|
||||
Go through passed offsets, filtering ifs
|
||||
|
@@ -22,7 +22,6 @@ Finally we save token information.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import inspect
|
||||
from collections import namedtuple
|
||||
from array import array
|
||||
|
||||
@@ -39,6 +38,36 @@ class Scanner2(scan.Scanner):
|
||||
# For <2.5 it is <generator expression>
|
||||
self.genexpr_name = '<genexpr>';
|
||||
|
||||
@staticmethod
|
||||
def unmangle_name(name, classname):
|
||||
"""Remove __ from the end of _name_ if it starts with __classname__
|
||||
return the "unmangled" name.
|
||||
"""
|
||||
if name.startswith(classname) and name[-2:] != '__':
|
||||
return name[len(classname) - 2:]
|
||||
return name
|
||||
|
||||
@classmethod
|
||||
def unmangle_code_names(self, co, classname):
|
||||
"""Remove __ from the end of _name_ if it starts with __classname__
|
||||
return the "unmangled" name.
|
||||
"""
|
||||
if classname:
|
||||
classname = '_' + classname.lstrip('_') + '__'
|
||||
|
||||
free = [ self.unmangle_name(name, classname)
|
||||
for name in (co.co_cellvars + co.co_freevars) ]
|
||||
names = [ self.unmangle_name(name, classname)
|
||||
for name in co.co_names ]
|
||||
varnames = [ self.unmangle_name(name, classname)
|
||||
for name in co.co_varnames ]
|
||||
else:
|
||||
free = co.co_cellvars + co.co_freevars
|
||||
names = co.co_names
|
||||
varnames = co.co_varnames
|
||||
return free, names, varnames
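A quick illustration of the private-name mangling these helpers undo: CPython stores an attribute written as __secret inside class MyClass under the name _MyClass__secret, and unmangle_name() strips the class prefix back off. The names below are made up for the example:

classname = '_' + 'MyClass'.lstrip('_') + '__'   # '_MyClass__'
name = '_MyClass__secret'
if name.startswith(classname) and name[-2:] != '__':
    print(name[len(classname) - 2:])             # prints '__secret'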
|
||||
|
||||
|
||||
def ingest(self, co, classname=None, code_objects={}, show_asm=None):
|
||||
"""
|
||||
Pick out tokens from an uncompyle6 code object, and transform them,
|
||||
@@ -82,22 +111,7 @@ class Scanner2(scan.Scanner):
|
||||
self.build_lines_data(co, n)
|
||||
self.build_prev_op(n)
|
||||
|
||||
# class and names
|
||||
if classname:
|
||||
classname = '_' + classname.lstrip('_') + '__'
|
||||
|
||||
def unmangle(name):
|
||||
if name.startswith(classname) and name[-2:] != '__':
|
||||
return name[len(classname) - 2:]
|
||||
return name
|
||||
|
||||
free = [ unmangle(name) for name in (co.co_cellvars + co.co_freevars) ]
|
||||
names = [ unmangle(name) for name in co.co_names ]
|
||||
varnames = [ unmangle(name) for name in co.co_varnames ]
|
||||
else:
|
||||
free = co.co_cellvars + co.co_freevars
|
||||
names = co.co_names
|
||||
varnames = co.co_varnames
|
||||
free, names, varnames = self.unmangle_code_names(co, classname)
|
||||
self.names = names
|
||||
|
||||
# Scan for assertions. Later we will
|
||||
@@ -281,16 +295,6 @@ class Scanner2(scan.Scanner):
|
||||
print()
|
||||
return tokens, customize
|
||||
|
||||
def op_size(self, op):
|
||||
"""
|
||||
Return size of operator with its arguments
|
||||
for given opcode <op>.
|
||||
"""
|
||||
if op < self.opc.HAVE_ARGUMENT and op not in self.opc.hasArgumentExtended:
|
||||
return 1
|
||||
else:
|
||||
return 3
|
||||
|
||||
def setup_code(self, co):
|
||||
"""
|
||||
Creates Python-independent bytecode structure (byte array) in
|
||||
@@ -921,17 +925,3 @@ class Scanner2(scan.Scanner):
|
||||
instr_offsets = filtered
|
||||
filtered = []
|
||||
return instr_offsets
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
from uncompyle6 import PYTHON_VERSION
|
||||
if PYTHON_VERSION >= 2.3:
|
||||
co = inspect.currentframe().f_code
|
||||
from uncompyle6 import PYTHON_VERSION
|
||||
tokens, customize = Scanner2(PYTHON_VERSION).ingest(co)
|
||||
for t in tokens:
|
||||
print(t)
|
||||
else:
|
||||
print("Need to be Python 3.2 or greater to demo; I am %s." %
|
||||
PYTHON_VERSION)
|
||||
pass
|
||||
|
@@ -110,22 +110,7 @@ class Scanner26(scan.Scanner2):
|
||||
self.build_lines_data(co, n)
|
||||
self.build_prev_op(n)
|
||||
|
||||
# class and names
|
||||
if classname:
|
||||
classname = '_' + classname.lstrip('_') + '__'
|
||||
|
||||
def unmangle(name):
|
||||
if name.startswith(classname) and name[-2:] != '__':
|
||||
return name[len(classname) - 2:]
|
||||
return name
|
||||
|
||||
free = [ unmangle(name) for name in (co.co_cellvars + co.co_freevars) ]
|
||||
names = [ unmangle(name) for name in co.co_names ]
|
||||
varnames = [ unmangle(name) for name in co.co_varnames ]
|
||||
else:
|
||||
free = co.co_cellvars + co.co_freevars
|
||||
names = co.co_names
|
||||
varnames = co.co_varnames
|
||||
free, names, varnames = self.unmangle_code_names(co, classname)
|
||||
self.names = names
|
||||
|
||||
codelen = len(self.code)
|
||||
|
@@ -249,7 +249,7 @@ class Scanner3(Scanner):
|
||||
opname = '%s_N%d' % (opname, name_pair_args)
|
||||
pass
|
||||
if annotate_args > 0:
|
||||
opname = '%s_A_%d' % [opname, annotate_args]
|
||||
opname = '%s_A_%d' % (opname, annotate_args)
|
||||
pass
|
||||
opname = '%s_%d' % (opname, pos_args)
|
||||
pattr = ("%d positional, %d keyword pair, %d annotated" %
|
||||
@@ -286,6 +286,7 @@ class Scanner3(Scanner):
|
||||
pattr = "%d before vararg, %d after" % (before_args, after_args)
|
||||
argval = (before_args, after_args)
|
||||
opname = '%s_%d+%d' % (opname, before_args, after_args)
|
||||
|
||||
elif op == self.opc.JUMP_ABSOLUTE:
|
||||
# Further classify JUMP_ABSOLUTE into backward jumps
|
||||
# which are used in loops, and "CONTINUE" jumps which
|
||||
@@ -388,16 +389,6 @@ class Scanner3(Scanner):
|
||||
for _ in range(self.op_size(op)):
|
||||
self.prev_op.append(offset)
|
||||
|
||||
def op_size(self, op):
|
||||
"""
|
||||
Return size of operator with its arguments
|
||||
for given opcode <op>.
|
||||
"""
|
||||
if op < self.opc.HAVE_ARGUMENT:
|
||||
return 1
|
||||
else:
|
||||
return 3
|
||||
|
||||
def find_jump_targets(self):
|
||||
"""
|
||||
Detect all offsets in a byte code which are jump targets.
|
||||
|
@@ -67,7 +67,9 @@ from uncompyle6.show import (
|
||||
)
|
||||
|
||||
from uncompyle6.semantics.pysource import AST, INDENT_PER_LEVEL, NONE, PRECEDENCE, \
|
||||
ParserError, TABLE_DIRECT, escape, find_all_globals, find_globals, find_none, minint, MAP
|
||||
ParserError, TABLE_DIRECT, escape, find_globals, minint, MAP
|
||||
|
||||
from uncompyle6.semantics.make_function import find_all_globals, find_none
|
||||
|
||||
if PYTHON3:
|
||||
from itertools import zip_longest
|
||||
@@ -77,8 +79,7 @@ else:
|
||||
from StringIO import StringIO
|
||||
|
||||
|
||||
from spark_parser import GenericASTTraversalPruningException, \
|
||||
DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
|
||||
from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
|
||||
|
||||
from collections import namedtuple
|
||||
NodeInfo = namedtuple("NodeInfo", "node start finish")
|
||||
|
558  uncompyle6/semantics/make_function.py (new file)
@@ -0,0 +1,558 @@
|
||||
# Copyright (c) 2015, 2016 by Rocky Bernstein
|
||||
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
|
||||
"""
|
||||
All the crazy things we have to do to handle Python functions
|
||||
"""
|
||||
from xdis.code import iscode
|
||||
from uncompyle6.scanner import Code
|
||||
from uncompyle6.parsers.astnode import AST
|
||||
from uncompyle6 import PYTHON3
|
||||
from uncompyle6.semantics.parser_error import ParserError
|
||||
|
||||
if PYTHON3:
|
||||
from itertools import zip_longest
|
||||
else:
|
||||
from itertools import izip_longest as zip_longest
|
||||
|
||||
from uncompyle6.show import maybe_show_ast_param_default
|
||||
|
||||
def find_all_globals(node, globs):
|
||||
"""Find globals in this statement."""
|
||||
for n in node:
|
||||
if isinstance(n, AST):
|
||||
globs = find_all_globals(n, globs)
|
||||
elif n.type in ('STORE_GLOBAL', 'DELETE_GLOBAL', 'LOAD_GLOBAL'):
|
||||
globs.add(n.pattr)
|
||||
return globs
|
||||
|
||||
def find_globals(node, globs):
|
||||
"""Find globals in this statement."""
|
||||
for n in node:
|
||||
if isinstance(n, AST):
|
||||
globs = find_globals(n, globs)
|
||||
elif n.type in ('STORE_GLOBAL', 'DELETE_GLOBAL'):
|
||||
globs.add(n.pattr)
|
||||
return globs
|
||||
|
||||
def find_none(node):
|
||||
for n in node:
|
||||
if isinstance(n, AST):
|
||||
if not n in ('return_stmt', 'return_if_stmt'):
|
||||
if find_none(n):
|
||||
return True
|
||||
elif n.type == 'LOAD_CONST' and n.pattr is None:
|
||||
return True
|
||||
return False
|
||||
|
||||
# FIXME: DRY the below code...
|
||||
|
||||
def make_function3_annotate(self, node, isLambda, nested=1,
|
||||
codeNode=None, annotate_last=-1):
|
||||
"""
|
||||
Dump function definition, doc string, and function
|
||||
body. This code is specialized for Python 3"""
|
||||
|
||||
def build_param(ast, name, default):
|
||||
"""build parameters:
|
||||
- handle defaults
|
||||
- handle format tuple parameters
|
||||
"""
|
||||
if default:
|
||||
value = self.traverse(default, indent='')
|
||||
maybe_show_ast_param_default(self.showast, name, value)
|
||||
result = '%s=%s' % (name, value)
|
||||
if result[-2:] == '= ': # default was 'LOAD_CONST None'
|
||||
result += 'None'
|
||||
return result
|
||||
else:
|
||||
return name
|
||||
|
||||
# MAKE_FUNCTION_... or MAKE_CLOSURE_...
|
||||
assert node[-1].type.startswith('MAKE_')
|
||||
|
||||
annotate_tuple = node[annotate_last]
|
||||
annotate_args = {}
|
||||
|
||||
if (annotate_tuple == 'annotate_tuple'
|
||||
and annotate_tuple[0] in ('LOAD_CONST', 'LOAD_NAME')
|
||||
and isinstance(annotate_tuple[0].attr, tuple)):
|
||||
annotate_tup = annotate_tuple[0].attr
|
||||
i = -1
|
||||
j = annotate_last-1
|
||||
l = -len(node)
|
||||
while j >= l and node[j].type in ('annotate_arg', 'annotate_tuple'):
|
||||
annotate_args[annotate_tup[i]] = (node[j][0].attr,
|
||||
node[j][0] == 'LOAD_CONST')
|
||||
i -= 1
|
||||
j -= 1
|
||||
|
||||
args_node = node[-1]
|
||||
if isinstance(args_node.attr, tuple):
|
||||
# positional args are before kwargs
|
||||
defparams = node[:args_node.attr[0]]
|
||||
pos_args, kw_args, annotate_argc = args_node.attr
|
||||
else:
|
||||
defparams = node[:args_node.attr]
|
||||
kw_args = 0
|
||||
pass
|
||||
|
||||
if 3.0 <= self.version <= 3.2:
|
||||
lambda_index = -2
|
||||
elif 3.03 <= self.version:
|
||||
lambda_index = -3
|
||||
else:
|
||||
lambda_index = None
|
||||
|
||||
if lambda_index and isLambda and iscode(node[lambda_index].attr):
|
||||
assert node[lambda_index].type == 'LOAD_LAMBDA'
|
||||
code = node[lambda_index].attr
|
||||
else:
|
||||
code = codeNode.attr
|
||||
|
||||
assert iscode(code)
|
||||
code = Code(code, self.scanner, self.currentclass)
|
||||
|
||||
# add defaults values to parameter names
|
||||
argc = code.co_argcount
|
||||
paramnames = list(code.co_varnames[:argc])
|
||||
|
||||
try:
|
||||
ast = self.build_ast(code._tokens,
|
||||
code._customize,
|
||||
isLambda = isLambda,
|
||||
noneInNames = ('None' in code.co_names))
|
||||
except ParserError as p:
|
||||
self.write(str(p))
|
||||
self.ERROR = p
|
||||
return
|
||||
|
||||
kw_pairs = args_node.attr[1]
|
||||
indent = self.indent
|
||||
|
||||
if isLambda:
|
||||
self.write("lambda ")
|
||||
else:
|
||||
self.write("(")
|
||||
|
||||
last_line = self.f.getvalue().split("\n")[-1]
|
||||
l = len(last_line)
|
||||
indent = ' ' * l
|
||||
line_number = self.line_number
|
||||
|
||||
if 4 & code.co_flags: # flag 2 -> variable number of args
|
||||
self.write('*%s' % code.co_varnames[argc + kw_pairs])
|
||||
argc += 1
|
||||
|
||||
i = len(paramnames) - len(defparams)
|
||||
suffix = ''
|
||||
for param in paramnames[:i]:
|
||||
self.write(suffix, param)
|
||||
if param in annotate_args:
|
||||
value, string = annotate_args[param]
|
||||
if string:
|
||||
self.write(': "%s"' % value)
|
||||
else:
|
||||
self.write(': %s' % value)
|
||||
suffix = ', '
|
||||
|
||||
suffix = ', ' if i > 0 else ''
|
||||
for n in node:
|
||||
if n == 'pos_arg':
|
||||
self.write(suffix)
|
||||
param = paramnames[i]
|
||||
self.write(param)
|
||||
if param in annotate_args:
|
||||
self.write(':"%s' % annotate_args[param])
|
||||
self.write('=')
|
||||
i += 1
|
||||
self.preorder(n)
|
||||
if (line_number != self.line_number):
|
||||
suffix = ",\n" + indent
|
||||
line_number = self.line_number
|
||||
else:
|
||||
suffix = ', '
|
||||
|
||||
# self.println(indent, '#flags:\t', int(code.co_flags))
|
||||
if kw_args > 0:
|
||||
if not (4 & code.co_flags):
|
||||
if argc > 0:
|
||||
self.write(", *, ")
|
||||
else:
|
||||
self.write("*, ")
|
||||
pass
|
||||
else:
|
||||
self.write(", ")
|
||||
|
||||
kwargs = node[0]
|
||||
last = len(kwargs)-1
|
||||
i = 0
|
||||
for n in node[0]:
|
||||
if n == 'kwarg':
|
||||
self.write('%s=' % n[0].pattr)
|
||||
self.preorder(n[1])
|
||||
if i < last:
|
||||
self.write(', ')
|
||||
i += 1
|
||||
pass
|
||||
pass
|
||||
pass
|
||||
|
||||
if 8 & code.co_flags: # flag 3 -> keyword args
|
||||
if argc > 0:
|
||||
self.write(', ')
|
||||
self.write('**%s' % code.co_varnames[argc + kw_pairs])
|
||||
|
||||
if isLambda:
|
||||
self.write(": ")
|
||||
else:
|
||||
self.write(')')
|
||||
if 'return' in annotate_args:
|
||||
value, string = annotate_args['return']
|
||||
if string:
|
||||
self.write(' -> "%s"' % value)
|
||||
else:
|
||||
self.write(' -> %s' % value)
|
||||
|
||||
self.println(":")
|
||||
|
||||
if (len(code.co_consts) > 0 and
|
||||
code.co_consts[0] is not None and not isLambda): # ugly
|
||||
# docstring exists, dump it
|
||||
self.print_docstring(indent, code.co_consts[0])
|
||||
|
||||
code._tokens = None # save memory
|
||||
assert ast == 'stmts'
|
||||
|
||||
all_globals = find_all_globals(ast, set())
|
||||
for g in ((all_globals & self.mod_globs) | find_globals(ast, set())):
|
||||
self.println(self.indent, 'global ', g)
|
||||
self.mod_globs -= all_globals
|
||||
has_none = 'None' in code.co_names
|
||||
rn = has_none and not find_none(ast)
|
||||
self.gen_source(ast, code.co_name, code._customize, isLambda=isLambda,
|
||||
returnNone=rn)
|
||||
code._tokens = code._customize = None # save memory
|
||||
|
||||
def make_function2(self, node, isLambda, nested=1, codeNode=None):
|
||||
"""
|
||||
Dump function definition, doc string, and function body.
|
||||
This code is specialized for Python 2.
|
||||
"""
|
||||
|
||||
# FIXME: call make_function3 if we are self.version >= 3.0
|
||||
# and then simplify the below.
|
||||
|
||||
def build_param(ast, name, default):
|
||||
"""build parameters:
|
||||
- handle defaults
|
||||
- handle format tuple parameters
|
||||
"""
|
||||
# if formal parameter is a tuple, the parameter name
|
||||
# starts with a dot (eg. '.1', '.2')
|
||||
if name.startswith('.'):
|
||||
# replace the name with the tuple-string
|
||||
name = self.get_tuple_parameter(ast, name)
|
||||
pass
|
||||
|
||||
if default:
|
||||
value = self.traverse(default, indent='')
|
||||
maybe_show_ast_param_default(self.showast, name, value)
|
||||
result = '%s=%s' % (name, value)
|
||||
if result[-2:] == '= ': # default was 'LOAD_CONST None'
|
||||
result += 'None'
|
||||
return result
|
||||
else:
|
||||
return name
|
||||
|
||||
# MAKE_FUNCTION_... or MAKE_CLOSURE_...
|
||||
assert node[-1].type.startswith('MAKE_')
|
||||
|
||||
args_node = node[-1]
|
||||
if isinstance(args_node.attr, tuple):
|
||||
# positional args are after kwargs
|
||||
defparams = node[1:args_node.attr[0]+1]
|
||||
pos_args, kw_args, annotate_argc = args_node.attr
|
||||
else:
|
||||
defparams = node[:args_node.attr]
|
||||
kw_args = 0
|
||||
pass
|
||||
|
||||
lambda_index = None
|
||||
|
||||
if lambda_index and isLambda and iscode(node[lambda_index].attr):
|
||||
assert node[lambda_index].type == 'LOAD_LAMBDA'
|
||||
code = node[lambda_index].attr
|
||||
else:
|
||||
code = codeNode.attr
|
||||
|
||||
assert iscode(code)
|
||||
code = Code(code, self.scanner, self.currentclass)
|
||||
|
||||
# add defaults values to parameter names
|
||||
argc = code.co_argcount
|
||||
paramnames = list(code.co_varnames[:argc])
|
||||
|
||||
# defaults are for last n parameters, thus reverse
|
||||
paramnames.reverse(); defparams.reverse()
|
||||
|
||||
try:
|
||||
ast = self.build_ast(code._tokens,
|
||||
code._customize,
|
||||
isLambda = isLambda,
|
||||
noneInNames = ('None' in code.co_names))
|
||||
except ParserError as p:
|
||||
self.write(str(p))
|
||||
self.ERROR = p
|
||||
return
|
||||
|
||||
kw_pairs = args_node.attr[1] if self.version >= 3.0 else 0
|
||||
indent = self.indent
|
||||
|
||||
# build parameters
|
||||
params = [build_param(ast, name, default) for
|
||||
name, default in zip_longest(paramnames, defparams, fillvalue=None)]
|
||||
params.reverse() # back to correct order
|
||||
|
||||
if 4 & code.co_flags: # flag 2 -> variable number of args
|
||||
params.append('*%s' % code.co_varnames[argc])
|
||||
argc += 1
|
||||
|
||||
# dump parameter list (with default values)
|
||||
if isLambda:
|
||||
self.write("lambda ", ", ".join(params))
|
||||
else:
|
||||
self.write("(", ", ".join(params))
|
||||
|
||||
if kw_args > 0:
|
||||
if not (4 & code.co_flags):
|
||||
if argc > 0:
|
||||
self.write(", *, ")
|
||||
else:
|
||||
self.write("*, ")
|
||||
pass
|
||||
else:
|
||||
self.write(", ")
|
||||
|
||||
for n in node:
|
||||
if n == 'pos_arg':
|
||||
continue
|
||||
else:
|
||||
self.preorder(n)
|
||||
break
|
||||
pass
|
||||
|
||||
if 8 & code.co_flags: # flag 3 -> keyword args
|
||||
if argc > 0:
|
||||
self.write(', ')
|
||||
self.write('**%s' % code.co_varnames[argc + kw_pairs])
|
||||
|
||||
if isLambda:
|
||||
self.write(": ")
|
||||
else:
|
||||
self.println("):")
|
||||
|
||||
if len(code.co_consts) > 0 and code.co_consts[0] is not None and not isLambda: # ugly
|
||||
# docstring exists, dump it
|
||||
self.print_docstring(indent, code.co_consts[0])
|
||||
|
||||
code._tokens = None # save memory
|
||||
assert ast == 'stmts'
|
||||
|
||||
all_globals = find_all_globals(ast, set())
|
||||
for g in ((all_globals & self.mod_globs) | find_globals(ast, set())):
|
||||
self.println(self.indent, 'global ', g)
|
||||
self.mod_globs -= all_globals
|
||||
has_none = 'None' in code.co_names
|
||||
rn = has_none and not find_none(ast)
|
||||
self.gen_source(ast, code.co_name, code._customize, isLambda=isLambda,
|
||||
returnNone=rn)
|
||||
code._tokens = None; code._customize = None # save memory
|
||||
|
||||
|
||||
def make_function3(self, node, isLambda, nested=1, codeNode=None):
|
||||
"""Dump function definition, doc string, and function body."""
|
||||
|
||||
# FIXME: call make_function3 if we are self.version >= 3.0
|
||||
# and then simplify the below.
|
||||
|
||||
def build_param(ast, name, default):
|
||||
"""build parameters:
|
||||
- handle defaults
|
||||
- handle format tuple parameters
|
||||
"""
|
||||
if default:
|
||||
value = self.traverse(default, indent='')
|
||||
            maybe_show_ast_param_default(self.showast, name, value)
            result = '%s=%s' % (name, value)
            if result[-2:] == '= ':  # default was 'LOAD_CONST None'
                result += 'None'
            return result
        else:
            return name

    # MAKE_FUNCTION_... or MAKE_CLOSURE_...
    assert node[-1].type.startswith('MAKE_')

    args_node = node[-1]
    if isinstance(args_node.attr, tuple):
        if self.version <= 3.3:
            # positional args are after kwargs
            defparams = node[1:args_node.attr[0]+1]
        else:
            # positional args are before kwargs
            defparams = node[:args_node.attr[0]]
        pos_args, kw_args, annotate_argc = args_node.attr
    else:
        defparams = node[:args_node.attr]
        kw_args = 0
        pass

    if 3.0 <= self.version <= 3.2:
        lambda_index = -2
    elif 3.03 <= self.version:
        lambda_index = -3
    else:
        lambda_index = None

    if lambda_index and isLambda and iscode(node[lambda_index].attr):
        assert node[lambda_index].type == 'LOAD_LAMBDA'
        code = node[lambda_index].attr
    else:
        code = codeNode.attr

    assert iscode(code)
    code = Code(code, self.scanner, self.currentclass)

    # add defaults values to parameter names
    argc = code.co_argcount
    paramnames = list(code.co_varnames[:argc])

    # defaults are for last n parameters, thus reverse
    if not 3.0 <= self.version <= 3.2:
        paramnames.reverse(); defparams.reverse()

    try:
        ast = self.build_ast(code._tokens,
                             code._customize,
                             isLambda = isLambda,
                             noneInNames = ('None' in code.co_names))
    except ParserError as p:
        self.write(str(p))
        self.ERROR = p
        return

    kw_pairs = args_node.attr[1] if self.version >= 3.0 else 0
    indent = self.indent

    # build parameters
    if self.version != 3.2:
        params = [build_param(ast, name, default) for
                  name, default in zip_longest(paramnames, defparams, fillvalue=None)]
        params.reverse()  # back to correct order

        if 4 & code.co_flags:  # flag 2 -> variable number of args
            if self.version > 3.0:
                params.append('*%s' % code.co_varnames[argc + kw_pairs])
            else:
                params.append('*%s' % code.co_varnames[argc])
            argc += 1

        # dump parameter list (with default values)
        if isLambda:
            self.write("lambda ", ", ".join(params))
        else:
            self.write("(", ", ".join(params))
            # self.println(indent, '#flags:\t', int(code.co_flags))

    else:
        if isLambda:
            self.write("lambda ")
        else:
            self.write("(")
            pass

        last_line = self.f.getvalue().split("\n")[-1]
        l = len(last_line)
        indent = ' ' * l
        line_number = self.line_number

        if 4 & code.co_flags:  # flag 2 -> variable number of args
            self.write('*%s' % code.co_varnames[argc + kw_pairs])
            argc += 1

        i = len(paramnames) - len(defparams)
        self.write(", ".join(paramnames[:i]))
        suffix = ', ' if i > 0 else ''
        for n in node:
            if n == 'pos_arg':
                self.write(suffix)
                self.write(paramnames[i] + '=')
                i += 1
                self.preorder(n)
                if (line_number != self.line_number):
                    suffix = ",\n" + indent
                    line_number = self.line_number
                else:
                    suffix = ', '

    if kw_args > 0:
        if not (4 & code.co_flags):
            if argc > 0:
                self.write(", *, ")
            else:
                self.write("*, ")
            pass
        else:
            self.write(", ")

        if not 3.0 <= self.version <= 3.2:
            for n in node:
                if n == 'pos_arg':
                    continue
                elif self.version >= 3.4 and n.type != 'kwargs':
                    continue
                else:
                    self.preorder(n)
                break
        else:
            kwargs = node[0]
            last = len(kwargs)-1
            i = 0
            for n in node[0]:
                if n == 'kwarg':
                    self.write('%s=' % n[0].pattr)
                    self.preorder(n[1])
                    if i < last:
                        self.write(', ')
                    i += 1
                    pass
                pass
            pass
        pass

    if 8 & code.co_flags:  # flag 3 -> keyword args
        if argc > 0:
            self.write(', ')
        self.write('**%s' % code.co_varnames[argc + kw_pairs])

    if isLambda:
        self.write(": ")
    else:
        self.println("):")

    if len(code.co_consts) > 0 and code.co_consts[0] is not None and not isLambda: # ugly
        # docstring exists, dump it
        self.print_docstring(indent, code.co_consts[0])

    code._tokens = None # save memory
    assert ast == 'stmts'

    all_globals = find_all_globals(ast, set())
    for g in ((all_globals & self.mod_globs) | find_globals(ast, set())):
        self.println(self.indent, 'global ', g)
    self.mod_globs -= all_globals
    has_none = 'None' in code.co_names
    rn = has_none and not find_none(ast)
    self.gen_source(ast, code.co_name, code._customize, isLambda=isLambda,
                    returnNone=rn)
    code._tokens = None; code._customize = None # save memory
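The `4 & code.co_flags` and `8 & code.co_flags` tests above correspond to the standard CO_VARARGS and CO_VARKEYWORDS code-object flag bits. A minimal stand-alone sketch of the same check against a live code object (the star_args function here is only an illustration, not part of uncompyle6):

import inspect

def star_args(a, b=1, *rest, **extra):
    return a, b, rest, extra

code = star_args.__code__
# Bit 0x04 (inspect.CO_VARARGS) marks a *args parameter,
# bit 0x08 (inspect.CO_VARKEYWORDS) marks a **kwargs parameter.
assert code.co_flags & inspect.CO_VARARGS
assert code.co_flags & inspect.CO_VARKEYWORDS
# The *args name follows the positional (and any keyword-only) names in
# co_varnames, which is why the decompiler indexes co_varnames[argc + kw_pairs].
print(code.co_varnames[:code.co_argcount])   # ('a', 'b')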
11
uncompyle6/semantics/parser_error.py
Normal file
@@ -0,0 +1,11 @@
import uncompyle6.parser as python_parser
class ParserError(python_parser.ParserError):
    def __init__(self, error, tokens):
        self.error = error # previous exception
        self.tokens = tokens

    def __str__(self):
        lines = ['--- This code section failed: ---']
        lines.extend([str(i) for i in self.tokens])
        lines.extend( ['', str(self.error)] )
        return '\n'.join(lines)
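As a rough illustration of how the new ParserError renders, a sketch follows; the token strings and the wrapped exception are made up for the example:

from uncompyle6.semantics.parser_error import ParserError

# Hypothetical tokens and error, only to show the __str__ layout.
fake_tokens = ['LOAD_FAST  0  x', 'RETURN_VALUE']
err = ParserError(Exception('parse failed'), fake_tokens)
print(err)
# --- This code section failed: ---
# LOAD_FAST  0  x
# RETURN_VALUE
#
# parse failed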
@@ -79,10 +79,13 @@ from spark_parser import GenericASTTraversal, DEFAULT_DEBUG as PARSER_DEFAULT_DE
from uncompyle6.scanner import Code, get_scanner
from uncompyle6.scanners.tok import Token, NoneToken
import uncompyle6.parser as python_parser
from uncompyle6.semantics.make_function import (
    make_function2, make_function3, make_function3_annotate, find_globals)
from uncompyle6.semantics.parser_error import ParserError

from uncompyle6.show import (
    maybe_show_asm,
    maybe_show_ast,
    maybe_show_ast_param_default,
)

if PYTHON3:
@@ -430,45 +433,6 @@ def is_docstring(node):
    except:
        return False

class ParserError(python_parser.ParserError):
    def __init__(self, error, tokens):
        self.error = error # previous exception
        self.tokens = tokens

    def __str__(self):
        lines = ['--- This code section failed: ---']
        lines.extend([str(i) for i in self.tokens])
        lines.extend( ['', str(self.error)] )
        return '\n'.join(lines)

def find_globals(node, globs):
    """Find globals in this statement."""
    for n in node:
        if isinstance(n, AST):
            globs = find_globals(n, globs)
        elif n.type in ('STORE_GLOBAL', 'DELETE_GLOBAL'):
            globs.add(n.pattr)
    return globs

def find_all_globals(node, globs):
    """Find globals in this statement."""
    for n in node:
        if isinstance(n, AST):
            globs = find_all_globals(n, globs)
        elif n.type in ('STORE_GLOBAL', 'DELETE_GLOBAL', 'LOAD_GLOBAL'):
            globs.add(n.pattr)
    return globs

def find_none(node):
    for n in node:
        if isinstance(n, AST):
            if not n in ('return_stmt', 'return_if_stmt'):
                if find_none(n):
                    return True
        elif n.type == 'LOAD_CONST' and n.pattr is None:
            return True
    return False

class SourceWalkerError(Exception):
    def __init__(self, errmsg):
        self.errmsg = errmsg
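The find_globals and find_all_globals helpers removed here (they now live in make_function.py) simply walk the syntax tree recursively and collect names from STORE_GLOBAL, DELETE_GLOBAL, and LOAD_GLOBAL tokens. A self-contained sketch of the same idea with stub node classes; the classes below are mock-ups, not uncompyle6's real AST types:

class Tok(object):
    def __init__(self, type_, pattr):
        self.type, self.pattr = type_, pattr

class Node(list):   # stand-in for uncompyle6's AST: a list of children
    pass

def find_globals(node, globs):
    # Recurse into subtrees, pick up names stored or deleted at global scope.
    for n in node:
        if isinstance(n, Node):
            globs = find_globals(n, globs)
        elif n.type in ('STORE_GLOBAL', 'DELETE_GLOBAL'):
            globs.add(n.pattr)
    return globs

tree = Node([Tok('LOAD_CONST', 1), Node([Tok('STORE_GLOBAL', 'counter')])])
print(find_globals(tree, set()))   # {'counter'}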
@@ -514,12 +478,11 @@ class SourceWalker(GenericASTTraversal, object):
        self.name = None
        self.version = version
        self.is_pypy = is_pypy

        self.customize_for_version(is_pypy, version)

        return

    @staticmethod
    def customize_for_version(is_pypy, version):
    def customize_for_version(self, is_pypy, version):
        if is_pypy:
            ########################
            # PyPy changes
@@ -612,28 +575,75 @@ class SourceWalker(GenericASTTraversal, object):
                'comp_for': ( ' for %c in %c%c', 2, 0, 3 ),
            })


        ##########################
        # Python 3.2 and 3.3 only
        ##########################
        if 3.2 <= version <= 3.3:
        if version >= 3.0:
            TABLE_DIRECT.update({
                'funcdef_annotate': ( '\n\n%|def %c%c\n', -1, 0),
                'store_locals': ( '%|# inspect.currentframe().f_locals = __locals__\n', ),
            })
        elif version >= 3.4:
            ########################
            # Python 3.4+ Additions
            #######################
            TABLE_DIRECT.update({
                'LOAD_CLASSDEREF': ( '%{pattr}', ),
            })
            if version >= 3.6:

            def n_mkfunc_annotate(node):

                if self.version >= 3.3 or node[-2] == 'kwargs':
                    # LOAD_CONST code object ..
                    # LOAD_CONST 'x0' if >= 3.3
                    # EXTENDED_ARG
                    # MAKE_FUNCTION ..
                    code = node[-4]
                elif node[-3] == 'expr':
                    code = node[-3][0]
                else:
                    # LOAD_CONST code object ..
                    # MAKE_FUNCTION ..
                    code = node[-3]

                self.indentMore()
                annotate_last = -4 if self.version == 3.1 else -5

                # FIXME: handle and pass full annotate args
                make_function3_annotate(self, node, isLambda=False,
                                        codeNode=code, annotate_last=annotate_last)

                if len(self.param_stack) > 1:
                    self.write('\n\n')
                else:
                    self.write('\n\n\n')
                self.indentLess()
                self.prune() # stop recursing
            self.n_mkfunc_annotate = n_mkfunc_annotate


        if version >= 3.4:
            ########################
            # Python 3.6+ Additions
            # Python 3.4+ Additions
            #######################
            TABLE_DIRECT.update({
                'fstring_expr': ( "f'{%c%{conversion}}'", 0),
            })
                'LOAD_CLASSDEREF': ( '%{pattr}', ),
            })
            if version >= 3.6:
                ########################
                # Python 3.6+ Additions
                #######################
                TABLE_DIRECT.update({
                    'fstring_expr': ( "{%c%{conversion}}", 0),
                    'fstring_single': ( "f'{%c%{conversion}}'", 0),
                    'fstring_multi': ( "f'%c'", 0),
                })

                FSTRING_CONVERSION_MAP = {1: '!s', 2: '!r', 3: '!a'}
                def f_conversion(node):
                    node.conversion = FSTRING_CONVERSION_MAP.get(node.data[1].attr, '')

                def n_fstring_expr(node):
                    f_conversion(node)
                    self.default(node)
                self.n_fstring_expr = n_fstring_expr

                def n_fstring_single(node):
                    f_conversion(node)
                    self.default(node)

                self.n_fstring_single = n_fstring_single

        return

    f = property(lambda s: s.params['f'],
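The FSTRING_CONVERSION_MAP above turns the numeric conversion carried by Python 3.6's FORMAT_VALUE argument back into f-string conversion syntax. A minimal sketch of just that mapping in isolation; the conversion_suffix helper name is invented for the example:

FSTRING_CONVERSION_MAP = {1: '!s', 2: '!r', 3: '!a'}

def conversion_suffix(format_value_flags):
    # CPython 3.6's FORMAT_VALUE keeps the conversion in the low two bits:
    # 1 -> str(), 2 -> repr(), 3 -> ascii(), 0 -> no conversion.
    return FSTRING_CONVERSION_MAP.get(format_value_flags & 0x03, '')

assert conversion_suffix(2) == '!r'    # reconstructs f'{x!r}'
assert conversion_suffix(0) == ''      # plain f'{x}'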
@@ -1140,6 +1150,13 @@ class SourceWalker(GenericASTTraversal, object):
        self.indentLess()
        self.prune() # stop recursing

    def make_function(self, node, isLambda, nested=1,
                      codeNode=None, annotate=None):
        if self.version >= 3.0:
            make_function3(self, node, isLambda, nested, codeNode)
        else:
            make_function2(self, node, isLambda, nested, codeNode)

    def n_mklambda(self, node):
        self.make_function(node, isLambda=True, codeNode=node[-2])
        self.prune() # stop recursing
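The new make_function wrapper simply dispatches on bytecode version to the routines that were broken out into make_function.py. Roughly the same pattern in isolation, as a sketch with stand-in functions rather than the real writers:

def make_function2(walker, node):   # stand-in for the Python 2 writer
    return 'py2'

def make_function3(walker, node):   # stand-in for the Python 3 writer
    return 'py3'

def make_function(walker, node, version):
    # Mirrors SourceWalker.make_function: one entry point, two implementations.
    if version >= 3.0:
        return make_function3(walker, node)
    return make_function2(walker, node)

assert make_function(None, None, 2.7) == 'py2'
assert make_function(None, None, 3.5) == 'py3'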
@@ -1886,12 +1903,6 @@ class SourceWalker(GenericASTTraversal, object):
            node[-2][0].type = 'unpack_w_parens'
        self.default(node)

    FSTRING_CONVERSION_MAP = {1: '!s', 2: '!r', 3: '!a'}

    def n_fstring_expr(self, node):
        node.conversion = self.FSTRING_CONVERSION_MAP.get(node.data[1].attr, '')
        self.default(node)

    def engine(self, entry, startnode):
        """The format template interpretation engine. See the comment at the
        beginning of this module for how we interpret format specifications such as
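The engine mentioned here expands template entries such as 'comp_for': ( ' for %c in %c%c', 2, 0, 3 ), where each %c pulls in the rendering of the child whose index follows the format string. A toy re-implementation of only that %c substitution, assuming the children are already rendered to text; the real engine handles many more specifiers (%|, %{...}, %p, and so on):

def expand_template(template, children):
    # template is (format-string, child-index, child-index, ...)
    fmt, indexes = template[0], template[1:]
    out, it = '', iter(indexes)
    i = 0
    while i < len(fmt):
        if fmt[i:i+2] == '%c':
            out += children[next(it)]   # substitute the rendered child
            i += 2
        else:
            out += fmt[i]
            i += 1
    return out

# children rendered to text, in tree order: iterable, store node, target, rest
children = ['items', '(store)', 'x', ' if x > 0']
print(expand_template((' for %c in %c%c', 2, 0, 3), children))
# -> ' for x in items if x > 0'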
@@ -2092,190 +2103,6 @@ class SourceWalker(GenericASTTraversal, object):
        # return self.traverse(node[1])
        raise Exception("Can't find tuple parameter " + name)

    def make_function(self, node, isLambda, nested=1, codeNode=None):
        """Dump function defintion, doc string, and function body."""

        def build_param(ast, name, default):
            """build parameters:
                - handle defaults
                - handle format tuple parameters
            """
            if self.version < 3.0:
                # if formal parameter is a tuple, the paramater name
                # starts with a dot (eg. '.1', '.2')
                if name.startswith('.'):
                    # replace the name with the tuple-string
                    name = self.get_tuple_parameter(ast, name)
                    pass
                pass

            if default:
                value = self.traverse(default, indent='')
                maybe_show_ast_param_default(self.showast, name, value)
                result = '%s=%s' % (name, value)
                if result[-2:] == '= ': # default was 'LOAD_CONST None'
                    result += 'None'
                return result
            else:
                return name

        # MAKE_FUNCTION_... or MAKE_CLOSURE_...
        assert node[-1].type.startswith('MAKE_')

        args_node = node[-1]
        if isinstance(args_node.attr, tuple):
            if self.version <= 3.3:
                # positional args are after kwargs
                defparams = node[1:args_node.attr[0]+1]
            else:
                # positional args are before kwargs
                defparams = node[:args_node.attr[0]]
            pos_args, kw_args, annotate_args = args_node.attr
        else:
            defparams = node[:args_node.attr]
            kw_args = 0
            pass

        if 3.0 <= self.version <= 3.2:
            lambda_index = -2
        elif 3.03 <= self.version:
            lambda_index = -3
        else:
            lambda_index = None

        if lambda_index and isLambda and iscode(node[lambda_index].attr):
            assert node[lambda_index].type == 'LOAD_LAMBDA'
            code = node[lambda_index].attr
        else:
            code = codeNode.attr

        assert iscode(code)
        code = Code(code, self.scanner, self.currentclass)

        # add defaults values to parameter names
        argc = code.co_argcount
        paramnames = list(code.co_varnames[:argc])

        # defaults are for last n parameters, thus reverse
        if not 3.0 <= self.version <= 3.2:
            paramnames.reverse(); defparams.reverse()

        try:
            ast = self.build_ast(code._tokens,
                                 code._customize,
                                 isLambda = isLambda,
                                 noneInNames = ('None' in code.co_names))
        except ParserError as p:
            self.write(str(p))
            self.ERROR = p
            return

        kw_pairs = args_node.attr[1] if self.version >= 3.0 else 0
        indent = self.indent

        # build parameters
        if not 3.0 <= self.version <= 3.2:
            params = [build_param(ast, name, default) for
                      name, default in zip_longest(paramnames, defparams, fillvalue=None)]
            params.reverse() # back to correct order

            if 4 & code.co_flags: # flag 2 -> variable number of args
                if self.version > 3.0:
                    params.append('*%s' % code.co_varnames[argc + kw_pairs])
                else:
                    params.append('*%s' % code.co_varnames[argc])
                argc += 1

            # dump parameter list (with default values)
            if isLambda:
                self.write("lambda ", ", ".join(params))
            else:
                self.write("(", ", ".join(params))
                # self.println(indent, '#flags:\t', int(code.co_flags))

        else:
            if isLambda:
                self.write("lambda ")
            else:
                self.write("(")

            if 4 & code.co_flags: # flag 2 -> variable number of args
                self.write('*%s' % code.co_varnames[argc + kw_pairs])
                argc += 1

            i = len(paramnames) - len(defparams)
            self.write(",".join(paramnames[:i]))
            suffix = ', ' if i > 0 else ''
            for n in node:
                if n == 'pos_arg':
                    self.write(suffix)
                    self.write(paramnames[i] + '=')
                    i += 1
                    self.preorder(n)
                    suffix = ', '

        if kw_args > 0:
            if not (4 & code.co_flags):
                if argc > 0:
                    self.write(", *, ")
                else:
                    self.write("*, ")
                pass
            else:
                self.write(", ")

            if not 3.0 <= self.version <= 3.2:
                for n in node:
                    if n == 'pos_arg':
                        continue
                    elif self.version >= 3.4 and n.type != 'kwargs':
                        continue
                    else:
                        self.preorder(n)
                    break
            else:
                kwargs = node[0]
                last = len(kwargs)-1
                i = 0
                for n in node[0]:
                    if n == 'kwarg':
                        self.write('%s=' % n[0].pattr)
                        self.preorder(n[1])
                        if i < last:
                            self.write(', ')
                        i += 1
                        pass
                    pass
                pass
            pass

        if 8 & code.co_flags: # flag 3 -> keyword args
            if argc > 0:
                self.write(', ')
            self.write('**%s' % code.co_varnames[argc + kw_pairs])

        if isLambda:
            self.write(": ")
        else:
            self.println("):")

        if len(code.co_consts)>0 and code.co_consts[0] is not None and not isLambda: # ugly
            # docstring exists, dump it
            self.print_docstring(indent, code.co_consts[0])

        code._tokens = None # save memory
        assert ast == 'stmts'

        all_globals = find_all_globals(ast, set())
        for g in ((all_globals & self.mod_globs) | find_globals(ast, set())):
            self.println(self.indent, 'global ', g)
        self.mod_globs -= all_globals
        has_none = 'None' in code.co_names
        rn = has_none and not find_none(ast)
        self.gen_source(ast, code.co_name, code._customize, isLambda=isLambda,
                        returnNone=rn)
        code._tokens = None; code._customize = None # save memory

    def build_class(self, code):
        """Dump class definition, doc string and class body."""
@@ -1,3 +1,3 @@
# This file is suitable for sourcing inside bash as
# well as importing into Python
VERSION='2.9.2'
VERSION='2.9.4'