Mirror of https://github.com/rocky/python-uncompyle6.git
Compare commits: release-2. ... release-2.

30 Commits

4377354cf9
6caa2c12fa
3153a955d4
6f3a88d7e2
109737cbef
05733c6171
6765a2ea97
c85496a92d
e4ba73adfb
7bf93980ce
8241a5e3a8
faac11ad8c
fe04b97c6b
62f6220082
11e6eff427
2286aa5320
72ac7eb27c
a8c5f71cfe
feec241da8
c5f359f9be
bfe8357f52
ceb47aba9c
08720474bf
119bb9bb26
4455b5e280
dcbf8d2cf7
b52baddab6
03bb54f8ea
313e468bdc
dc80b140c6
.travis.yml

@@ -3,10 +3,11 @@ language: python
sudo: false

python:
- '2.6'
- '2.7'
- '3.4'
- '3.5'
- '2.7'
- '2.6'
- '3.4'
- '3.2'

install:
- pip install -r requirements.txt
138 ChangeLog
@@ -1,6 +1,142 @@
2016-05-05 rocky <rocky@gnu.org>

* uncompyle6/version.py: Get ready for release 2.3.4

2016-05-05 rocky <rocky@gnu.org>

* .travis.yml: Remove pypy3 add python 3.2 testing Reorder list for testing preference

2016-05-05 rocky <rocky@gnu.org>

* .travis.yml: Remove pypy

2016-05-05 rocky <rocky@gnu.org>

* Makefile, test/Makefile, uncompyle6/semantics/pysource.py: Fix up
3.2 tests Remove pypy

2016-05-05 rocky <rocky@gnu.org>

* .travis.yml: Try pypy and pypy3

2016-05-05 rocky <rocky@gnu.org>

* test/simple_source/def/05_abc_class.py,
test/simple_source/def/06_classbug.py, uncompyle6/parsers/parse3.py:
Python 3.5 abc.py bug distilled

2016-05-05 rocky <rocky@gnu.org>

* uncompyle6/scanners/dis35.py, uncompyle6/scanners/scanner35.py:
Add cross-Python-protable 3.5 dis module

2016-05-04 rocky <rocky@gnu.org>

* test/simple_source/stmts/05_with.py,
uncompyle6/opcodes/opcode_35.py, uncompyle6/parser.py,
uncompyle6/parsers/parse3.py, uncompyle6/scanners/scanner35.py:
Handle 3.5 with [as] scanner35.py: Fix a small variable-name typo

2016-05-03 rocky <rocky@gnu.org>

* : One more test

2016-05-03 rocky <rocky@gnu.org>

* uncompyle6/scanners/scanner3.py,
uncompyle6/scanners/scanner34.py, uncompyle6/scanners/scanner35.py:
Don't repeat next_except_jump

2016-05-03 rocky <rb@dustyfeet.com>

* __pkginfo__.py, requirements.txt: Wrong package name

2016-05-03 rocky <rocky@gnu.org>

* __pkginfo__.py, requirements.txt, setup.py: More package
administrivia

2016-05-03 rocky <rocky@gnu.org>

* uncompyle6/scanner.py: Remove one more old-style Python class

2016-05-03 rocky <rocky@gnu.org>

* uncompyle6/scanners/scanner27.py: DRY Python 2.7 scanner more

2016-05-03 rocky <rocky@gnu.org>

* MANIFEST.in: Include LICENSE in package

2016-05-03 rocky <rocky@gnu.org>

* ChangeLog, NEWS, uncompyle6/version.py: Get ready for release
2.3.3

2016-05-02 rocky <rocky@gnu.org>

* README.rst: Be more explicit that we need Python 2.6 or later

2016-05-02 rocky <rocky@gnu.org>

* : commit feec241da88107b97bbdfbabeb3ae7131a7aa923 Author: rocky
<rocky@gnu.org> Date: Mon May 2 21:20:17 2016 -0400

2016-05-02 rocky <rb@dustyfeet.com>

* README.rst: Note relation to other uncompyle forks Add some other minor corrections and additions as well.

2016-05-02 rocky <rb@dustyfeet.com>

* uncompyle6/__init__.py: Trivial spacing change

2016-05-02 rocky <rocky@gnu.org>

* ChangeLog, NEWS, __pkginfo__.py, bin/pydisassemble,
bin/uncompyle6, setup.py, uncompyle6/__init__.py,
uncompyle6/version.py: Add -V | --version and simplfy changing it

2016-05-01 rocky <rocky@gnu.org>

* uncompyle6/__init__.py: Expose uncompyle_file

2016-05-01 rocky <rocky@gnu.org>

* test/Makefile, uncompyle6/semantics/pysource.py: Bug

2016-05-01 rocky <rocky@gnu.org>

* test/Makefile, test/simple_source/expression/05_const_map.py: Add
test for last fix. Drop 2.5 test until we figure out what's wrong

2016-05-01 rocky <rocky@gnu.org>

* uncompyle6/parsers/parse3.py, uncompyle6/scanners/scanner35.py,
uncompyle6/semantics/pysource.py: Bug in 3.5 constant map parsing

2016-05-01 rocky <rocky@gnu.org>

* uncompyle6/__init__.py: Export module load and fns load_file,
load_module

2016-05-01 rocky <rocky@gnu.org>

* __pkginfo__.py, setup.py, uncompyle6/marsh.py: License is MIT marsh.py: remove unused import

2016-05-01 rocky <rocky@gnu.org>

* uncompyle6/parsers/parse3.py: Forgot to define Python3ParserSingle

2016-05-01 rocky <rocky@gnu.org>

* uncompyle6/parser.py, uncompyle6/parsers/parse2.py,
uncompyle6/parsers/parse3.py: Start to DRY Python2 and Python3
grammars Separate out 3.2, and 3.5+ specific grammar code

2016-04-30 rocky <rocky@gnu.org>

* README.rst, __pkginfo__.py: Get ready for release 2.3.0
* ChangeLog, NEWS, README.rst, __pkginfo__.py: Get ready for release
2.3.1

2016-04-30 rocky <rocky@gnu.org>
MANIFEST.in

@@ -1,6 +1,7 @@
include README.rst
include HISTORY.md
include ChangeLog
include HISTORY.md
include LICENSE
include __pkginfo__.py
recursive-include uncompyle6 *.py
include bin/uncompyle6
2 Makefile
@@ -28,7 +28,7 @@ check-2.7 check-3.3 check-3.4: pytest
	$(MAKE) -C test $@

#: Tests for Python 3.5 - pytest doesn't work here
check-3.5:
check-3.2 check-3.5:
	$(MAKE) -C test $@

#:Tests for Python 2.6 (doesn't have pytest)
24 NEWS
@@ -1,4 +1,26 @@
uncompyle6 2.2.1 2016-04-30
uncompyle6 2.3.4 2016-05-5

- More Python 3.5 parsing bugs addressed
- decompiling Python 3.5 from other Python versions works
- test from Python 3.2
- remove "__module__ = __name__" in 3.0 <= Python 3.2

uncompyle6 2.3.3 2016-05-3

- Fix bug in running uncompyle6 script on Python 3
- Speed up performance on deparsing long lists by grouping in chunks of 32 and 256 items
- DRY Python expressions between Python 2 and 3

uncompyle6 2.3.2 2016-05-1

- Add --version option standalone scripts
- Correct License information in package
- expose fns uncompyle_file, load_file, and load_module
- Start to DRY Python2 and Python3 grammars Separate out 3.2, and 3.5+
  specific grammar code
- Fix bug in 3.5+ constant map parsing

uncompyle6 2.3.0, 2.3.1 2016-04-30

- Require spark_parser >= 1.1.0
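A note on the "grouping in chunks" speedup above: later in this diff the BUILD_LIST grammar rules are rewritten in terms of expr1024/expr32 nonterminals instead of one expr per element. A minimal sketch of the arithmetic, assuming the chunk sizes used by those rules (the helper below is illustrative, not part of the change):

def build_list_rule(n):
    # Mirror the customized rule added in parse2.py/parse3.py:
    # groups of 1024, then groups of 32, then single expressions.
    return ('build_list ::= '
            + 'expr1024 ' * (n // 1024)
            + 'expr32 ' * ((n // 32) % 32)
            + 'expr ' * (n % 32)
            + 'BUILD_LIST_%d' % n)

# The 33-element list in test/simple_source/expression/05_long_list.py
# becomes "build_list ::= expr32 expr BUILD_LIST_33": two large reductions
# instead of 33 separate expr reductions.
print(build_list_rule(33))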
21 README.rst
@@ -4,7 +4,7 @@ uncompyle6
==========

A native Python bytecode Disassembler, Decompiler, Fragment Decompiler
and bytecode library
and bytecode library. Follows in the tradition of decompyle, uncompyle, and uncompyle2.


Introduction
@@ -12,12 +12,17 @@ Introduction

*uncompyle6* translates Python bytecode back into equivalent Python
source code. It accepts bytecodes from Python version 2.5 to 3.4 or
so and has been tested on Python running versions 2.6, 2.7, 3.3,
3.4 and 3.5.
so. The code requires Python 2.6 or later and has been tested on Python
running versions 2.6, 2.7, 3.3, 3.4 and 3.5.

Why this?
---------

There were a number of decompyle, uncompile, uncompyle2, uncompyle3
forks around. All of them come basically from the same code base, and
almost all of them not maintained very well. This code pulls these together
and addresses a number of open issues in those.

What makes this different from other CPython bytecode decompilers? Its
ability to deparse just fragments and give source-code information
around a given bytecode offset.
@@ -81,20 +86,22 @@ Run
./bin/uncompyle6 -h
./bin/pydisassemble -h

for usage help
for usage help.


Known Bugs/Restrictions
-----------------------

Python 2 deparsing is probably as solid as the various versions of
uncompyle2. Python 3 deparsing is okay but not as solid.
uncompyle2. Python 3 deparsing is okay but not as solid. Python 3.5 is missing some of new opcodes added, but still often works.

See Also
--------

* https://github.com/zrax/pycdc
* https://code.google.com/p/unpyc3/
* https://github.com/zrax/pycdc : supports all versions of Python and is written in C++
* https://code.google.com/archive/p/unpyc3/ : supports Python 3.2 only

The above projects use a different decompiling technique what is used here.

The HISTORY file.
__pkginfo__.py

@@ -12,7 +12,7 @@ copyright = """
Copyright (C) 2015, 2016 Rocky Bernstein <rb@dustyfeet.com>.
"""

classifiers = ['Development Status :: 3 - Alpha',
classifiers = ['Development Status :: 4 - Beta',
               'Intended Audience :: Developers',
               'Operating System :: OS Independent',
               'Programming Language :: Python',
@@ -31,10 +31,10 @@ classifiers = ['Development Status :: 3 - Alpha',
author = "Rocky Bernstein, Hartmut Goebel, John Aycock, and others"
author_email = "rb@dustyfeet.com"
ftp_url = None
install_requires = ['python-spark >= 1.1.0']
install_requires = ['spark-parser >= 1.1.1']
license = 'GPL'

# license = 'BSDish'
license = 'MIT'
mailing_list = 'python-debugger@googlegroups.com'
modname = 'uncompyle6'
packages = ['uncompyle6', 'uncompyle6.opcodes', 'uncompyle6.semantics', 'uncompyle6.scanners', 'uncompyle6.parsers']
@@ -50,7 +50,6 @@ def get_srcdir():
    return os.path.realpath(filename)

ns = {}
version = '2.3.1'
web = 'https://github.com/rocky/python-uncompyle6/'

# tracebacks in zip files are funky and not debuggable
bin/pydisassemble

@@ -1,7 +1,7 @@
#!/usr/bin/env python
# Mode: -*- python -*-
#
# Copyright (c) 2015 by Rocky Bernstein <rb@dustyfeet.com>
# Copyright (c) 2015-2016 by Rocky Bernstein <rb@dustyfeet.com>
#
from __future__ import print_function
import sys, os, getopt
@@ -9,11 +9,13 @@ import sys, os, getopt
program = os.path.basename(__file__)

__doc__ = """
Usage: %s [OPTIONS]... FILE
Usage:
%s [OPTIONS]... FILE
%s [--help | -h | -V | --version]

Examples:
%s foo.pyc
%s foo.py
%s foo.pyc
%s foo.py
%s -o foo.pydis foo.pyc
%s -o /tmp foo.pyc

@@ -24,7 +26,7 @@ Options:
<path>
--help show this message

""" % ((program,) * 5)
""" % ((program,) * 6)


Usage_short = \
@@ -32,6 +34,7 @@ Usage_short = \

from uncompyle6 import check_python_version
from uncompyle6.disas import disassemble_files
from uncompyle6.version import VERSION

check_python_version(program)

@@ -40,7 +43,7 @@ out_base = None


try:
    opts, files = getopt.getopt(sys.argv[1:], 'ho:', ['help'])
    opts, files = getopt.getopt(sys.argv[1:], 'hVo:', ['help', 'version'])
except getopt.GetoptError as e:
    print('%s: %s' % (os.path.basename(sys.argv[0]), e), file=sys.stderr)
    sys.exit(-1)
@@ -48,6 +51,9 @@ except getopt.GetoptError as e:
for opt, val in opts:
    if opt in ('-h', '--help'):
        print(__doc__)
        sys.exit(1)
    elif opt in ('-V', '--version'):
        print("%s %s" % (program, VERSION))
        sys.exit(0)
    elif opt == '-o':
        outfile = val
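One detail worth noting in the hunk above: the usage text is filled in with "% ((program,) * N)", so N must equal the number of %s placeholders; adding the new usage line is why the multiplier moves from 5 to 6. A small self-contained illustration (the text is abridged from the script):

program = "pydisassemble"
usage = """
Usage:
  %s [OPTIONS]... FILE
  %s [--help | -h | -V | --version]

Examples:
  %s foo.pyc
  %s foo.py
  %s -o foo.pydis foo.pyc
  %s -o /tmp foo.pyc
""" % ((program,) * 6)   # six %s placeholders, so six copies of program
print(usage)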
bin/uncompyle6

@@ -1,16 +1,23 @@
#!/usr/bin/env python
# Mode: -*- python -*-
#
# Copyright (c) 2015-2016 by Rocky Bernstein
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
# Copyright (c) 2015 by Rocky Bernstein
#
from __future__ import print_function
import sys, os, getopt, time

"""
Usage: uncompyle6 [OPTIONS]... [ FILE | DIR]...
program = os.path.basename(__file__)

__doc__ = """
Usage:
%s [OPTIONS]... [ FILE | DIR]...
%s [--help | -h | --V | --version]

Examples:
uncompyle6 foo.pyc bar.pyc # decompile foo.pyc, bar.pyc to stdout
uncompyle6 -o . foo.pyc bar.pyc # decompile to ./foo.pyc_dis and ./bar.pyc_dis
uncompyle6 -o /tmp /usr/lib/python1.5 # decompile whole library
%s foo.pyc bar.pyc # decompile foo.pyc, bar.pyc to stdout
%s -o . foo.pyc bar.pyc # decompile to ./foo.pyc_dis and ./bar.pyc_dis
%s -o /tmp /usr/lib/python1.5 # decompile whole library

Options:
-o <path> output decompiled files to this path:
@@ -34,26 +41,25 @@ Options:
Debugging Options:
--asm -a include byte-code (disables --verify)
--grammar -g show matching grammar
--treee -t include syntax tree (disables --verify)
--tree -t include syntax tree (disables --verify)

Extensions of generated files:
'.pyc_dis' '.pyo_dis' successfully decompiled (and verified if --verify)
+ '_unverified' successfully decompile but --verify failed
+ '_failed' decompile failed (contact author for enhancement)
"""

from __future__ import print_function
import sys, os, getopt, time
""" % ((program,) * 5)

program = os.path.basename(__file__)

from uncompyle6 import verify, check_python_version
from uncompyle6.main import main, status_msg
from uncompyle6.version import VERSION

def usage():
    print("""usage:
%s [--help] [--verify] [--asm] [--tree] [--grammar] [-o <path>] FILE|DIR...
""" % program)
%s [--verify] [--asm] [--tree] [--grammar] [-o <path>] FILE|DIR...
%s [--help | -h | --version | -V]
""" % (program, program))
    sys.exit(1)


@@ -68,8 +74,8 @@ timestamp = False
timestampfmt = "# %Y.%m.%d %H:%M:%S %Z"

try:
    opts, files = getopt.getopt(sys.argv[1:], 'hagtdro:c:p:',
                                'help asm grammar recurse timestamp tree verify '
    opts, files = getopt.getopt(sys.argv[1:], 'hagtdrVo:c:p:',
                                'help asm grammar recurse timestamp tree verify version '
                                'showgrammar'.split(' '))
except getopt.GetoptError as e:
    print('%s: %s' % (os.path.basename(sys.argv[0]), e), file=sys.stderr)
@@ -80,6 +86,9 @@ for opt, val in opts:
    if opt in ('-h', '--help'):
        print(__doc__)
        sys.exit(0)
    elif opt in ('-V', '--version'):
        print("%s %s" % (program, VERSION))
        sys.exit(0)
    elif opt == '--verify':
        options['do_verify'] = True
    elif opt in ('--asm', '-a'):
requirements.txt

@@ -1 +1 @@
spark_parser >= 1.1.0
spark-parser >= 1.1.1
23 setup.py
@@ -2,29 +2,24 @@

"""Setup script for the 'uncompyle6' distribution."""

# Get the package information used in setup().
# from __pkginfo__ import \
# author, author_email, classifiers, \
# install_requires, license, long_description, \
# modname, packages, py_modules, \
# short_desc, version, web, zip_safe

from __pkginfo__ import \
    author, author_email, \
    long_description, \
    author, author_email, install_requires, \
    license, long_description, classifiers, \
    modname, packages, py_modules, scripts, \
    short_desc, version, web, zip_safe
    short_desc, web, zip_safe

__import__('pkg_resources')
from setuptools import setup

exec(open('uncompyle6/version.py').read())

setup(
    author = author,
    author_email = author_email,
    # classifiers = classifiers,
    classifiers = classifiers,
    description = short_desc,
    # install_requires = install_requires,
    # license = license,
    install_requires = install_requires,
    license = license,
    long_description = long_description,
    py_modules = py_modules,
    name = modname,
@@ -33,5 +28,5 @@ setup(
    url = web,
    setup_requires = ['nose>=1.0'],
    scripts = scripts,
    version = version,
    version = VERSION,
    zip_safe = zip_safe)
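The exec(open('uncompyle6/version.py').read()) line above is how setup.py picks up VERSION without importing the package before installation. Assuming version.py holds a single assignment (its contents are not shown in this diff; the ChangeLog only says it carries the release number), the pattern works like this:

# Hypothetical stand-in for uncompyle6/version.py.
version_py_text = "VERSION = '2.3.4'"

ns = {}
exec(version_py_text, ns)   # setup.py execs the real file's text into its own globals
print(ns['VERSION'])        # -> '2.3.4', later passed as setup(..., version=VERSION)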
test/Makefile

@@ -22,6 +22,10 @@ check:
#: Run working tests from Python 2.6 or 2.7
check-2.6 check-2.7: check-bytecode-sans-3.5 check-2.7-ok

#: Run working tests from Python 3.2
check-3.2: check-bytecode
	$(PYTHON) test_pythonlib.py --bytecode-3.2 --verify $(COMPILE)

#: Run working tests from Python 3.3
check-3.3: check-bytecode
	$(PYTHON) test_pythonlib.py --bytecode-3.3 --verify $(COMPILE)
BIN (new binary test files; contents not shown):
  test/bytecode_2.7/05_long_list.pyc
  test/bytecode_3.4/05_const_map.pyc
  test/bytecode_3.4/05_with.pyc
  test/bytecode_3.5/05_abc_class.pyc
  test/bytecode_3.5/05_const_map.pyc
  test/bytecode_3.5/05_long_list.pyc
  test/bytecode_3.5/05_with.pyc
14 test/simple_source/def/05_abc_class.py (new file)
@@ -0,0 +1,14 @@
# Python3.5 bug from abc.py:
# stmt ::= LOAD_CLOSURE RETURN_VALUE RETURN_LAST
#
# And this gets ignored.

# Note this is similar to 06_classbug.py but not the same.
# classmethod -> object

class abstractclassmethod(classmethod):
    __isabstractmethod__ = True

    def __init__(self, callable):
        callable.__isabstractmethod__ = True
        super().__init__(callable)
test/simple_source/def/06_classbug.py

@@ -5,6 +5,9 @@
# LOAD_FAST '__locals__'
# STORE_LOCALS ''

# Note this is similar to 05_abc_class.py but not the same:
# object -> classmethod

class abstractclassmethod(object):
    """A Python 3.2 STORE_LOCALS bug
    """
7 test/simple_source/expression/05_const_map.py (new file)
@@ -0,0 +1,7 @@
# Addresses a bug in the way Python 3.5+ handles
# creation of map constants
opts = {'highlight': True,
        'start_line': -1,
        'end_line': None
        }
print(opts)
3 test/simple_source/expression/05_long_list.py (new file)
@@ -0,0 +1,3 @@
# Long lists pose a slowdown in uncompiling.
x = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
print(x)
2 test/simple_source/stmts/05_with.py (new file)
@@ -0,0 +1,2 @@
with (sys) as f:
    print(f)
uncompyle6/__init__.py

@@ -30,13 +30,15 @@ from __future__ import print_function

import sys

__docformat__ = 'restructuredtext'

PYTHON3 = (sys.version_info >= (3, 0))

# We do this crazy way to support Python 2.6 which
# doesn't support version_major, and has a bug in
# floating point so we can't divide 26 by 10 and get
# 2.6
PYTHON_VERSION = sys.version_info[0]+ (sys.version_info[1] / 10.0)
PYTHON_VERSION = sys.version_info[0] + (sys.version_info[1] / 10.0)
PYTHON_VERSION_STR = "%s.%s" % (sys.version_info[0], sys.version_info[1])

sys.setrecursionlimit(5000)
@@ -50,6 +52,11 @@ def check_python_version(program):

import uncompyle6.semantics.pysource
import uncompyle6.semantics.fragments
import uncompyle6.load

# Export some functions
from uncompyle6.load import load_module, load_file
from uncompyle6.main import uncompyle_file

# Conventience functions so you can say:
# from uncompyle6 import deparse_code
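A quick worked example of the PYTHON_VERSION arithmetic above, which avoids sys.version_info.major/minor because on Python 2.6 version_info is a plain tuple without those attributes:

import sys

# On CPython 2.7.x, sys.version_info[:2] is (2, 7):  2 + 7/10.0 -> 2.7
# On CPython 3.5.x, sys.version_info[:2] is (3, 5):  3 + 5/10.0 -> 3.5
PYTHON_VERSION = sys.version_info[0] + (sys.version_info[1] / 10.0)
PYTHON_VERSION_STR = "%s.%s" % (sys.version_info[0], sys.version_info[1])
print(PYTHON_VERSION, PYTHON_VERSION_STR)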
uncompyle6/main.py

@@ -118,14 +118,14 @@ def main(in_base, out_base, files, codes, outfile=None,
                os.remove(outfile)
            sys.stderr.write("\nLast file: %s " % (infile))
            raise
        except:
            failed_files += 1
            if outfile:
                outstream.close()
                os.rename(outfile, outfile + '_failed')
            else:
                sys.stderr.write("\n# %s" % sys.exc_info()[1])
                sys.stderr.write("\n# Can't uncompile %s\n" % infile)
        # except:
        #     failed_files += 1
        #     if outfile:
        #         outstream.close()
        #         os.rename(outfile, outfile + '_failed')
        #     else:
        #         sys.stderr.write("\n# %s" % sys.exc_info()[1])
        #         sys.stderr.write("\n# Can't uncompile %s\n" % infile)
        else: # uncompile successful
            if outfile:
                outstream.close()
@@ -18,7 +18,6 @@ from __future__ import print_function
import sys, types
from struct import unpack

import uncompyle6.scanners.scanner3 as scan3
from uncompyle6.magics import PYTHON_MAGIC_INT
from uncompyle6.code import Code3
uncompyle6/opcodes/opcode_35.py

@@ -37,6 +37,9 @@ rm_op(opname, opmap, 'STORE_LOCALS', 69)
def_op('YIELD_FROM', 72)
def_op('LOAD_CLASSDEREF', 148)

# These are removed since Python 3.4
rm_op(opname, opmap, 'WITH_CLEANUP', 81)

# These are new since Python 3.4
def_op('BINARY_MATRIX_MULTIPLY', 16)
def_op('INPLACE_MATRIX_MULTIPLY', 17)
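For context, def_op and rm_op maintain the usual opname/opmap pair from CPython's opcode module: opname is a 256-entry list indexed by opcode number, and opmap maps names to numbers. A minimal sketch of what the two helpers are assumed to do (the real implementations live in uncompyle6's opcode modules and may differ in detail):

# Assumed table shapes, as in CPython's opcode module.
opname = ['<%r>' % op for op in range(256)]
opmap = {}

def def_op(name, op):
    # Register opcode number `op` under `name` (used for opcodes new in 3.5).
    opname[op] = name
    opmap[name] = op

def rm_op(opname, opmap, name, op):
    # Forget an opcode this Python version no longer has.
    assert opname[op] == name
    opname[op] = '<%r>' % op
    del opmap[name]

def_op('BINARY_MATRIX_MULTIPLY', 16)                 # new since Python 3.4
rm_op(opname, opmap, 'BINARY_MATRIX_MULTIPLY', 16)   # and how a removal would look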
@@ -192,6 +192,199 @@ class PythonParser(GenericASTBuilder):
|
||||
load_attrs ::= load_attrs LOAD_ATTR
|
||||
'''
|
||||
|
||||
def p_list_comprehension(self, args):
|
||||
"""
|
||||
expr ::= list_compr
|
||||
list_compr ::= BUILD_LIST_0 list_iter
|
||||
|
||||
list_iter ::= list_for
|
||||
list_iter ::= list_if
|
||||
list_iter ::= list_if_not
|
||||
list_iter ::= lc_body
|
||||
|
||||
_come_from ::= COME_FROM
|
||||
_come_from ::=
|
||||
|
||||
list_if ::= expr jmp_false list_iter
|
||||
list_if_not ::= expr jmp_true list_iter
|
||||
|
||||
lc_body ::= expr LIST_APPEND
|
||||
"""
|
||||
|
||||
def p_setcomp(self, args):
|
||||
"""
|
||||
expr ::= setcomp
|
||||
|
||||
setcomp ::= LOAD_SETCOMP MAKE_FUNCTION_0 expr GET_ITER CALL_FUNCTION_1
|
||||
|
||||
stmt ::= setcomp_func
|
||||
|
||||
setcomp_func ::= BUILD_SET_0 LOAD_FAST FOR_ITER designator comp_iter
|
||||
JUMP_BACK RETURN_VALUE RETURN_LAST
|
||||
|
||||
comp_iter ::= comp_if
|
||||
comp_iter ::= comp_ifnot
|
||||
comp_iter ::= comp_for
|
||||
comp_iter ::= comp_body
|
||||
comp_body ::= set_comp_body
|
||||
comp_body ::= gen_comp_body
|
||||
comp_body ::= dict_comp_body
|
||||
set_comp_body ::= expr SET_ADD
|
||||
gen_comp_body ::= expr YIELD_VALUE POP_TOP
|
||||
dict_comp_body ::= expr expr MAP_ADD
|
||||
|
||||
comp_if ::= expr jmp_false comp_iter
|
||||
comp_ifnot ::= expr jmp_true comp_iter
|
||||
"""
|
||||
|
||||
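A reader's note on the setcomp rule above: LOAD_SETCOMP is the scanner's customized token standing in for the LOAD_CONST that pushes the comprehension's code object, so the pattern corresponds to ordinary set-comprehension source such as the following (illustrative, not part of the diff):

# Roughly: LOAD_SETCOMP (the comprehension code object), MAKE_FUNCTION_0,
# then the iterable expression range(10), GET_ITER, CALL_FUNCTION_1.
squares = {x * x for x in range(10)}
print(sorted(squares))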
def p_expr(self, args):
|
||||
'''
|
||||
expr ::= _mklambda
|
||||
expr ::= SET_LINENO
|
||||
expr ::= LOAD_FAST
|
||||
expr ::= LOAD_NAME
|
||||
expr ::= LOAD_CONST
|
||||
expr ::= LOAD_GLOBAL
|
||||
expr ::= LOAD_DEREF
|
||||
expr ::= load_attr
|
||||
expr ::= binary_expr
|
||||
expr ::= binary_expr_na
|
||||
expr ::= build_list
|
||||
expr ::= cmp
|
||||
expr ::= mapexpr
|
||||
expr ::= and
|
||||
expr ::= and2
|
||||
expr ::= or
|
||||
expr ::= unary_expr
|
||||
expr ::= call_function
|
||||
expr ::= unary_not
|
||||
expr ::= unary_convert
|
||||
expr ::= binary_subscr
|
||||
expr ::= binary_subscr2
|
||||
expr ::= load_attr
|
||||
expr ::= get_iter
|
||||
expr ::= slice0
|
||||
expr ::= slice1
|
||||
expr ::= slice2
|
||||
expr ::= slice3
|
||||
expr ::= buildslice2
|
||||
expr ::= buildslice3
|
||||
expr ::= yield
|
||||
|
||||
binary_expr ::= expr expr binary_op
|
||||
binary_op ::= BINARY_ADD
|
||||
binary_op ::= BINARY_MULTIPLY
|
||||
binary_op ::= BINARY_AND
|
||||
binary_op ::= BINARY_OR
|
||||
binary_op ::= BINARY_XOR
|
||||
binary_op ::= BINARY_SUBTRACT
|
||||
binary_op ::= BINARY_DIVIDE
|
||||
binary_op ::= BINARY_TRUE_DIVIDE
|
||||
binary_op ::= BINARY_FLOOR_DIVIDE
|
||||
binary_op ::= BINARY_MODULO
|
||||
binary_op ::= BINARY_LSHIFT
|
||||
binary_op ::= BINARY_RSHIFT
|
||||
binary_op ::= BINARY_POWER
|
||||
|
||||
unary_expr ::= expr unary_op
|
||||
unary_op ::= UNARY_POSITIVE
|
||||
unary_op ::= UNARY_NEGATIVE
|
||||
unary_op ::= UNARY_INVERT
|
||||
|
||||
unary_not ::= expr UNARY_NOT
|
||||
unary_convert ::= expr UNARY_CONVERT
|
||||
|
||||
binary_subscr ::= expr expr BINARY_SUBSCR
|
||||
binary_subscr2 ::= expr expr DUP_TOPX_2 BINARY_SUBSCR
|
||||
|
||||
load_attr ::= expr LOAD_ATTR
|
||||
get_iter ::= expr GET_ITER
|
||||
slice0 ::= expr SLICE+0
|
||||
slice0 ::= expr DUP_TOP SLICE+0
|
||||
slice1 ::= expr expr SLICE+1
|
||||
slice1 ::= expr expr DUP_TOPX_2 SLICE+1
|
||||
slice2 ::= expr expr SLICE+2
|
||||
slice2 ::= expr expr DUP_TOPX_2 SLICE+2
|
||||
slice3 ::= expr expr expr SLICE+3
|
||||
slice3 ::= expr expr expr DUP_TOPX_3 SLICE+3
|
||||
buildslice3 ::= expr expr expr BUILD_SLICE_3
|
||||
buildslice2 ::= expr expr BUILD_SLICE_2
|
||||
|
||||
yield ::= expr YIELD_VALUE
|
||||
|
||||
_mklambda ::= load_closure mklambda
|
||||
_mklambda ::= mklambda
|
||||
|
||||
or ::= expr JUMP_IF_TRUE_OR_POP expr COME_FROM
|
||||
or ::= expr jmp_true expr _come_from
|
||||
and ::= expr jmp_false expr _come_from
|
||||
and ::= expr JUMP_IF_FALSE_OR_POP expr COME_FROM
|
||||
and2 ::= _jump jmp_false COME_FROM expr COME_FROM
|
||||
|
||||
expr ::= conditional
|
||||
conditional ::= expr jmp_false expr JUMP_FORWARD expr COME_FROM
|
||||
conditional ::= expr jmp_false expr JUMP_ABSOLUTE expr
|
||||
expr ::= conditionalnot
|
||||
conditionalnot ::= expr jmp_true expr _jump expr COME_FROM
|
||||
|
||||
ret_expr ::= expr
|
||||
ret_expr ::= ret_and
|
||||
ret_expr ::= ret_or
|
||||
|
||||
ret_expr_or_cond ::= ret_expr
|
||||
ret_expr_or_cond ::= ret_cond
|
||||
ret_expr_or_cond ::= ret_cond_not
|
||||
|
||||
ret_and ::= expr JUMP_IF_FALSE_OR_POP ret_expr_or_cond COME_FROM
|
||||
ret_or ::= expr JUMP_IF_TRUE_OR_POP ret_expr_or_cond COME_FROM
|
||||
ret_cond ::= expr POP_JUMP_IF_FALSE expr RETURN_END_IF ret_expr_or_cond
|
||||
ret_cond_not ::= expr POP_JUMP_IF_TRUE expr RETURN_END_IF ret_expr_or_cond
|
||||
|
||||
stmt ::= return_lambda
|
||||
stmt ::= conditional_lambda
|
||||
|
||||
return_lambda ::= ret_expr RETURN_VALUE LAMBDA_MARKER
|
||||
conditional_lambda ::= expr jmp_false return_if_stmt return_stmt LAMBDA_MARKER
|
||||
|
||||
cmp ::= cmp_list
|
||||
cmp ::= compare
|
||||
compare ::= expr expr COMPARE_OP
|
||||
cmp_list ::= expr cmp_list1 ROT_TWO POP_TOP
|
||||
_come_from
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP JUMP_IF_FALSE_OR_POP
|
||||
cmp_list1 COME_FROM
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP jmp_false
|
||||
cmp_list1 _come_from
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP JUMP_IF_FALSE_OR_POP
|
||||
cmp_list2 COME_FROM
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP jmp_false
|
||||
cmp_list2 _come_from
|
||||
cmp_list2 ::= expr COMPARE_OP JUMP_FORWARD
|
||||
cmp_list2 ::= expr COMPARE_OP RETURN_VALUE
|
||||
mapexpr ::= BUILD_MAP kvlist
|
||||
|
||||
kvlist ::= kvlist kv
|
||||
kvlist ::= kvlist kv2
|
||||
kvlist ::= kvlist kv3
|
||||
kvlist ::=
|
||||
|
||||
kv ::= DUP_TOP expr ROT_TWO expr STORE_SUBSCR
|
||||
kv2 ::= DUP_TOP expr expr ROT_THREE STORE_SUBSCR
|
||||
kv3 ::= expr expr STORE_MAP
|
||||
|
||||
exprlist ::= exprlist expr
|
||||
exprlist ::= expr
|
||||
|
||||
nullexprlist ::=
|
||||
|
||||
expr32 ::= expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr expr
|
||||
expr1024 ::= expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32 expr32
|
||||
'''
|
||||
|
||||
|
||||
def parse(p, tokens, customize):
|
||||
p.add_custom_rules(tokens, customize)
|
||||
@@ -201,12 +394,11 @@ def parse(p, tokens, customize):
|
||||
|
||||
|
||||
def get_python_parser(version, debug_parser, compile_mode='exec'):
|
||||
"""
|
||||
Returns parser object for Python version 2 or 3
|
||||
depending on the parameter passed. *compile_mode*
|
||||
is either 'exec', 'eval', or 'single'. See
|
||||
https://docs.python.org/3.6/library/functions.html#compile for an explanation
|
||||
of the different modes.
|
||||
"""Returns parser object for Python version 2 or 3, 3.2, 3.5on,
|
||||
etc., depending on the parameters passed. *compile_mode* is either
|
||||
'exec', 'eval', or 'single'. See
|
||||
https://docs.python.org/3.6/library/functions.html#compile for an
|
||||
explanation of the different modes.
|
||||
"""
|
||||
|
||||
if version < 3.0:
|
||||
@@ -217,13 +409,34 @@ def get_python_parser(version, debug_parser, compile_mode='exec'):
|
||||
p = parse2.Python2ParserSingle(debug_parser)
|
||||
else:
|
||||
import uncompyle6.parsers.parse3 as parse3
|
||||
if compile_mode == 'exec':
|
||||
p = parse3.Python3Parser(debug_parser)
|
||||
if version == 3.2:
|
||||
if compile_mode == 'exec':
|
||||
p = parse3.Python32Parser(debug_parser)
|
||||
else:
|
||||
p = parse3.Python32ParserSingle(debug_parser)
|
||||
elif version >= 3.5:
|
||||
if compile_mode == 'exec':
|
||||
p = parse3.Python35onParser(debug_parser)
|
||||
else:
|
||||
p = parse3.Python35onParserSingle(debug_parser)
|
||||
else:
|
||||
p = parse3.Python3ParserSingle(debug_parser)
|
||||
if compile_mode == 'exec':
|
||||
p = parse3.Python3Parser(debug_parser)
|
||||
else:
|
||||
p = parse3.Python3ParserSingle(debug_parser)
|
||||
p.version = version
|
||||
return p
|
||||
|
||||
class PythonParserSingle(PythonParser):
|
||||
def p_call_stmt(self, args):
|
||||
'''
|
||||
# single-mode compilation. Eval-mode interactive compilation
|
||||
# drops the last rule.
|
||||
|
||||
call_stmt ::= expr POP_TOP
|
||||
call_stmt ::= expr PRINT_EXPR
|
||||
'''
|
||||
|
||||
def python_parser(version, co, out=sys.stdout, showasm=False,
|
||||
parser_debug=PARSER_DEFAULT_DEBUG):
|
||||
assert iscode(co)
|
||||
@@ -234,6 +447,8 @@ def python_parser(version, co, out=sys.stdout, showasm=False,
|
||||
for t in tokens:
|
||||
print(t)
|
||||
|
||||
# For heavy grammar debugging
|
||||
# parser_debug = {'rules': True, 'transition': True, 'reduce' : True}
|
||||
p = get_python_parser(version, parser_debug)
|
||||
return parse(p, tokens, customize)
|
||||
|
||||
|
@@ -17,9 +17,9 @@ that a later phase can tern into a sequence of ASCII text.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
from uncompyle6.parser import PythonParser, nop_func
|
||||
from uncompyle6.parser import PythonParser, PythonParserSingle, nop_func
|
||||
from uncompyle6.parsers.astnode import AST
|
||||
from spark_parser import GenericASTBuilder, DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
|
||||
from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
|
||||
from uncompyle6 import PYTHON3
|
||||
|
||||
class Python2Parser(PythonParser):
|
||||
@@ -31,52 +31,14 @@ class Python2Parser(PythonParser):
|
||||
super(Python2Parser, self).__init__(AST, 'stmts', debug=debug_parser)
|
||||
self.customized = {}
|
||||
|
||||
def p_list_comprehension(self, args):
|
||||
'''
|
||||
expr ::= list_compr
|
||||
list_compr ::= BUILD_LIST_0 list_iter
|
||||
|
||||
list_iter ::= list_for
|
||||
list_iter ::= list_if
|
||||
list_iter ::= list_if_not
|
||||
list_iter ::= lc_body
|
||||
|
||||
_come_from ::= COME_FROM
|
||||
_come_from ::=
|
||||
|
||||
def p_list_comprehension2(self, args):
|
||||
"""
|
||||
list_for ::= expr _for designator list_iter JUMP_BACK
|
||||
list_if ::= expr jmp_false list_iter
|
||||
list_if_not ::= expr jmp_true list_iter
|
||||
"""
|
||||
|
||||
lc_body ::= expr LIST_APPEND
|
||||
def p_setcomp2(self, args):
|
||||
'''
|
||||
|
||||
def p_setcomp(self, args):
|
||||
'''
|
||||
expr ::= setcomp
|
||||
|
||||
setcomp ::= LOAD_SETCOMP MAKE_FUNCTION_0 expr GET_ITER CALL_FUNCTION_1
|
||||
|
||||
stmt ::= setcomp_func
|
||||
|
||||
setcomp_func ::= BUILD_SET_0 LOAD_FAST FOR_ITER designator comp_iter
|
||||
JUMP_BACK RETURN_VALUE RETURN_LAST
|
||||
|
||||
comp_iter ::= comp_if
|
||||
comp_iter ::= comp_ifnot
|
||||
comp_iter ::= comp_for
|
||||
comp_iter ::= comp_body
|
||||
comp_body ::= set_comp_body
|
||||
comp_body ::= gen_comp_body
|
||||
comp_body ::= dict_comp_body
|
||||
set_comp_body ::= expr SET_ADD
|
||||
gen_comp_body ::= expr YIELD_VALUE POP_TOP
|
||||
dict_comp_body ::= expr expr MAP_ADD
|
||||
|
||||
comp_if ::= expr jmp_false comp_iter
|
||||
comp_ifnot ::= expr jmp_true comp_iter
|
||||
|
||||
# This is different in python3 - shout it be?
|
||||
# This is different in python3 - should it be?
|
||||
comp_for ::= expr _for designator comp_iter JUMP_BACK
|
||||
'''
|
||||
|
||||
@@ -400,69 +362,10 @@ class Python2Parser(PythonParser):
|
||||
|
||||
'''
|
||||
|
||||
def p_expr(self, args):
|
||||
def p_expr2(self, args):
|
||||
'''
|
||||
expr ::= _mklambda
|
||||
expr ::= SET_LINENO
|
||||
expr ::= LOAD_FAST
|
||||
expr ::= LOAD_NAME
|
||||
expr ::= LOAD_CONST
|
||||
expr ::= LOAD_GLOBAL
|
||||
expr ::= LOAD_DEREF
|
||||
expr ::= LOAD_LOCALS
|
||||
expr ::= load_attr
|
||||
expr ::= binary_expr
|
||||
expr ::= binary_expr_na
|
||||
expr ::= build_list
|
||||
expr ::= cmp
|
||||
expr ::= mapexpr
|
||||
expr ::= and
|
||||
expr ::= and2
|
||||
expr ::= or
|
||||
expr ::= unary_expr
|
||||
expr ::= call_function
|
||||
expr ::= unary_not
|
||||
expr ::= unary_convert
|
||||
expr ::= binary_subscr
|
||||
expr ::= binary_subscr2
|
||||
expr ::= load_attr
|
||||
expr ::= get_iter
|
||||
expr ::= slice0
|
||||
expr ::= slice1
|
||||
expr ::= slice2
|
||||
expr ::= slice3
|
||||
expr ::= buildslice2
|
||||
expr ::= buildslice3
|
||||
expr ::= yield
|
||||
|
||||
binary_expr ::= expr expr binary_op
|
||||
binary_op ::= BINARY_ADD
|
||||
binary_op ::= BINARY_MULTIPLY
|
||||
binary_op ::= BINARY_AND
|
||||
binary_op ::= BINARY_OR
|
||||
binary_op ::= BINARY_XOR
|
||||
binary_op ::= BINARY_SUBTRACT
|
||||
binary_op ::= BINARY_DIVIDE
|
||||
binary_op ::= BINARY_TRUE_DIVIDE
|
||||
binary_op ::= BINARY_FLOOR_DIVIDE
|
||||
binary_op ::= BINARY_MODULO
|
||||
binary_op ::= BINARY_LSHIFT
|
||||
binary_op ::= BINARY_RSHIFT
|
||||
binary_op ::= BINARY_POWER
|
||||
|
||||
unary_expr ::= expr unary_op
|
||||
unary_op ::= UNARY_POSITIVE
|
||||
unary_op ::= UNARY_NEGATIVE
|
||||
unary_op ::= UNARY_INVERT
|
||||
|
||||
unary_not ::= expr UNARY_NOT
|
||||
unary_convert ::= expr UNARY_CONVERT
|
||||
|
||||
binary_subscr ::= expr expr BINARY_SUBSCR
|
||||
binary_subscr2 ::= expr expr DUP_TOPX_2 BINARY_SUBSCR
|
||||
|
||||
load_attr ::= expr LOAD_ATTR
|
||||
get_iter ::= expr GET_ITER
|
||||
slice0 ::= expr SLICE+0
|
||||
slice0 ::= expr DUP_TOP SLICE+0
|
||||
slice1 ::= expr expr SLICE+1
|
||||
@@ -471,79 +374,6 @@ class Python2Parser(PythonParser):
|
||||
slice2 ::= expr expr DUP_TOPX_2 SLICE+2
|
||||
slice3 ::= expr expr expr SLICE+3
|
||||
slice3 ::= expr expr expr DUP_TOPX_3 SLICE+3
|
||||
buildslice3 ::= expr expr expr BUILD_SLICE_3
|
||||
buildslice2 ::= expr expr BUILD_SLICE_2
|
||||
|
||||
yield ::= expr YIELD_VALUE
|
||||
|
||||
_mklambda ::= load_closure mklambda
|
||||
_mklambda ::= mklambda
|
||||
|
||||
or ::= expr JUMP_IF_TRUE_OR_POP expr COME_FROM
|
||||
or ::= expr jmp_true expr _come_from
|
||||
and ::= expr jmp_false expr _come_from
|
||||
and ::= expr JUMP_IF_FALSE_OR_POP expr COME_FROM
|
||||
and2 ::= _jump jmp_false COME_FROM expr COME_FROM
|
||||
|
||||
expr ::= conditional
|
||||
conditional ::= expr jmp_false expr JUMP_FORWARD expr COME_FROM
|
||||
conditional ::= expr jmp_false expr JUMP_ABSOLUTE expr
|
||||
expr ::= conditionalnot
|
||||
conditionalnot ::= expr jmp_true expr _jump expr COME_FROM
|
||||
|
||||
ret_expr ::= expr
|
||||
ret_expr ::= ret_and
|
||||
ret_expr ::= ret_or
|
||||
|
||||
ret_expr_or_cond ::= ret_expr
|
||||
ret_expr_or_cond ::= ret_cond
|
||||
ret_expr_or_cond ::= ret_cond_not
|
||||
|
||||
ret_and ::= expr JUMP_IF_FALSE_OR_POP ret_expr_or_cond COME_FROM
|
||||
ret_or ::= expr JUMP_IF_TRUE_OR_POP ret_expr_or_cond COME_FROM
|
||||
ret_cond ::= expr POP_JUMP_IF_FALSE expr RETURN_END_IF ret_expr_or_cond
|
||||
ret_cond_not ::= expr POP_JUMP_IF_TRUE expr RETURN_END_IF ret_expr_or_cond
|
||||
|
||||
stmt ::= return_lambda
|
||||
stmt ::= conditional_lambda
|
||||
|
||||
return_lambda ::= ret_expr RETURN_VALUE LAMBDA_MARKER
|
||||
conditional_lambda ::= expr jmp_false return_if_stmt return_stmt LAMBDA_MARKER
|
||||
|
||||
cmp ::= cmp_list
|
||||
cmp ::= compare
|
||||
compare ::= expr expr COMPARE_OP
|
||||
cmp_list ::= expr cmp_list1 ROT_TWO POP_TOP
|
||||
_come_from
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP JUMP_IF_FALSE_OR_POP
|
||||
cmp_list1 COME_FROM
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP jmp_false
|
||||
cmp_list1 _come_from
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP JUMP_IF_FALSE_OR_POP
|
||||
cmp_list2 COME_FROM
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP jmp_false
|
||||
cmp_list2 _come_from
|
||||
cmp_list2 ::= expr COMPARE_OP JUMP_FORWARD
|
||||
cmp_list2 ::= expr COMPARE_OP RETURN_VALUE
|
||||
mapexpr ::= BUILD_MAP kvlist
|
||||
|
||||
kvlist ::= kvlist kv
|
||||
kvlist ::= kvlist kv2
|
||||
kvlist ::= kvlist kv3
|
||||
kvlist ::=
|
||||
|
||||
kv ::= DUP_TOP expr ROT_TWO expr STORE_SUBSCR
|
||||
kv2 ::= DUP_TOP expr expr ROT_THREE STORE_SUBSCR
|
||||
kv3 ::= expr expr STORE_MAP
|
||||
|
||||
exprlist ::= exprlist expr
|
||||
exprlist ::= expr
|
||||
|
||||
nullexprlist ::=
|
||||
'''
|
||||
|
||||
def add_custom_rules(self, tokens, customize):
|
||||
@@ -573,7 +403,8 @@ class Python2Parser(PythonParser):
|
||||
|
||||
op = k[:k.rfind('_')]
|
||||
if op in ('BUILD_LIST', 'BUILD_TUPLE', 'BUILD_SET'):
|
||||
rule = 'build_list ::= ' + 'expr '*v + k
|
||||
rule = ('build_list ::= ' + 'expr1024 '*(v//1024) +
|
||||
'expr32 '*((v//32)%32) + 'expr '*(v%32) + k)
|
||||
elif op in ('UNPACK_TUPLE', 'UNPACK_SEQUENCE'):
|
||||
rule = 'unpack ::= ' + k + ' designator'*v
|
||||
elif op == 'UNPACK_LIST':
|
||||
@@ -609,12 +440,5 @@ class Python2Parser(PythonParser):
|
||||
raise Exception('unknown customize token %s' % k)
|
||||
self.addRule(rule, nop_func)
|
||||
|
||||
class Python2ParserSingle(Python2Parser):
|
||||
def p_call_stmt(self, args):
|
||||
'''
|
||||
# single-mode compilation. eval-mode interactive compilation
|
||||
# drops the last rule.
|
||||
|
||||
call_stmt ::= expr POP_TOP
|
||||
call_stmt ::= expr PRINT_EXPR
|
||||
'''
|
||||
class Python2ParserSingle(Python2Parser, PythonParserSingle):
|
||||
pass
|
||||
|
@@ -17,7 +17,7 @@ that a later phase can tern into a sequence of ASCII text.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
from uncompyle6.parser import PythonParser, nop_func
|
||||
from uncompyle6.parser import PythonParser, PythonParserSingle, nop_func
|
||||
from uncompyle6.parsers.astnode import AST
|
||||
from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
|
||||
from uncompyle6 import PYTHON3
|
||||
@@ -42,60 +42,26 @@ class Python3Parser(PythonParser):
|
||||
pass
|
||||
return
|
||||
|
||||
def p_list_comprehension(self, args):
|
||||
'''
|
||||
def p_list_comprehension3(self, args):
|
||||
"""
|
||||
# Python3 scanner adds LOAD_LISTCOMP. Python3 does list comprehension like
|
||||
# other comprehensions (set, dictionary).
|
||||
|
||||
# listcomp is a custom rule
|
||||
# listcomp is a custom Python3 rule
|
||||
expr ::= listcomp
|
||||
|
||||
expr ::= list_compr
|
||||
list_compr ::= BUILD_LIST_0 list_iter
|
||||
|
||||
list_iter ::= list_for
|
||||
list_iter ::= list_if
|
||||
list_iter ::= list_if_not
|
||||
list_iter ::= lc_body
|
||||
|
||||
_come_from ::= COME_FROM
|
||||
_come_from ::=
|
||||
|
||||
list_for ::= expr FOR_ITER designator list_iter JUMP_BACK
|
||||
list_if ::= expr jmp_false list_iter
|
||||
list_if_not ::= expr jmp_true list_iter
|
||||
|
||||
lc_body ::= expr LIST_APPEND
|
||||
'''
|
||||
# See also common Python p_list_comprehension
|
||||
"""
|
||||
|
||||
def p_setcomp(self, args):
|
||||
'''
|
||||
expr ::= setcomp
|
||||
|
||||
setcomp ::= LOAD_SETCOMP MAKE_FUNCTION_0 expr GET_ITER CALL_FUNCTION_1
|
||||
|
||||
stmt ::= setcomp_func
|
||||
|
||||
setcomp_func ::= BUILD_SET_0 LOAD_FAST FOR_ITER designator comp_iter
|
||||
JUMP_BACK RETURN_VALUE RETURN_LAST
|
||||
|
||||
comp_iter ::= comp_if
|
||||
comp_iter ::= comp_ifnot
|
||||
comp_iter ::= comp_for
|
||||
comp_iter ::= comp_body
|
||||
comp_body ::= set_comp_body
|
||||
comp_body ::= gen_comp_body
|
||||
comp_body ::= dict_comp_body
|
||||
set_comp_body ::= expr SET_ADD
|
||||
gen_comp_body ::= expr YIELD_VALUE POP_TOP
|
||||
dict_comp_body ::= expr expr MAP_ADD
|
||||
|
||||
comp_if ::= expr jmp_false comp_iter
|
||||
comp_ifnot ::= expr jmp_true comp_iter
|
||||
|
||||
# This is different in python2 - should it be?
|
||||
def p_setcomp3(self, args):
|
||||
"""
|
||||
# This is different in Python 2 - should it be?
|
||||
comp_for ::= expr _for designator comp_iter JUMP_ABSOLUTE
|
||||
'''
|
||||
|
||||
# See also common Python p_setcomp
|
||||
"""
|
||||
|
||||
def p_grammar(self, args):
|
||||
'''
|
||||
@@ -154,9 +120,6 @@ class Python3Parser(PythonParser):
|
||||
designList ::= designator designator
|
||||
designList ::= designator DUP_TOP designList
|
||||
|
||||
# FIXME: Store local is only used in Python 3.2
|
||||
designator ::= STORE_LOCALS
|
||||
|
||||
designator ::= STORE_FAST
|
||||
designator ::= STORE_NAME
|
||||
designator ::= STORE_GLOBAL
|
||||
@@ -278,9 +241,6 @@ class Python3Parser(PythonParser):
|
||||
_ifstmts_jump ::= return_if_stmts
|
||||
_ifstmts_jump ::= c_stmts_opt JUMP_FORWARD _come_from _come_from
|
||||
|
||||
# FIXME: this optimization is only used in Python 3.5 and beyond
|
||||
_ifstmts_jump ::= c_stmts_opt
|
||||
|
||||
iflaststmt ::= testexpr c_stmts_opt JUMP_ABSOLUTE
|
||||
|
||||
iflaststmtl ::= testexpr c_stmts_opt JUMP_BACK
|
||||
@@ -411,145 +371,10 @@ class Python3Parser(PythonParser):
|
||||
|
||||
'''
|
||||
|
||||
def p_expr(self, args):
|
||||
def p_expr3(self, args):
|
||||
'''
|
||||
expr ::= _mklambda
|
||||
expr ::= SET_LINENO
|
||||
expr ::= LOAD_FAST
|
||||
expr ::= LOAD_NAME
|
||||
expr ::= LOAD_CONST
|
||||
expr ::= LOAD_GLOBAL
|
||||
expr ::= LOAD_DEREF
|
||||
expr ::= LOAD_LOCALS
|
||||
expr ::= LOAD_CLASSNAME
|
||||
expr ::= load_attr
|
||||
expr ::= binary_expr
|
||||
expr ::= binary_expr_na
|
||||
expr ::= build_list
|
||||
expr ::= cmp
|
||||
expr ::= mapexpr
|
||||
expr ::= and
|
||||
expr ::= and2
|
||||
expr ::= or
|
||||
expr ::= unary_expr
|
||||
expr ::= call_function
|
||||
expr ::= unary_not
|
||||
expr ::= unary_convert
|
||||
expr ::= binary_subscr
|
||||
expr ::= binary_subscr2
|
||||
expr ::= load_attr
|
||||
expr ::= get_iter
|
||||
expr ::= slice0
|
||||
expr ::= slice1
|
||||
expr ::= slice2
|
||||
expr ::= slice3
|
||||
expr ::= buildslice2
|
||||
expr ::= buildslice3
|
||||
expr ::= yield
|
||||
|
||||
binary_expr ::= expr expr binary_op
|
||||
binary_op ::= BINARY_ADD
|
||||
binary_op ::= BINARY_MULTIPLY
|
||||
binary_op ::= BINARY_AND
|
||||
binary_op ::= BINARY_OR
|
||||
binary_op ::= BINARY_XOR
|
||||
binary_op ::= BINARY_SUBTRACT
|
||||
binary_op ::= BINARY_DIVIDE
|
||||
binary_op ::= BINARY_TRUE_DIVIDE
|
||||
binary_op ::= BINARY_FLOOR_DIVIDE
|
||||
binary_op ::= BINARY_MODULO
|
||||
binary_op ::= BINARY_LSHIFT
|
||||
binary_op ::= BINARY_RSHIFT
|
||||
binary_op ::= BINARY_POWER
|
||||
|
||||
unary_expr ::= expr unary_op
|
||||
unary_op ::= UNARY_POSITIVE
|
||||
unary_op ::= UNARY_NEGATIVE
|
||||
unary_op ::= UNARY_INVERT
|
||||
|
||||
unary_not ::= expr UNARY_NOT
|
||||
unary_convert ::= expr UNARY_CONVERT
|
||||
|
||||
binary_subscr ::= expr expr BINARY_SUBSCR
|
||||
binary_subscr2 ::= expr expr DUP_TOPX_2 BINARY_SUBSCR
|
||||
|
||||
load_attr ::= expr LOAD_ATTR
|
||||
get_iter ::= expr GET_ITER
|
||||
|
||||
# Python3 drops slice0..slice3
|
||||
buildslice3 ::= expr expr expr BUILD_SLICE_3
|
||||
buildslice2 ::= expr expr BUILD_SLICE_2
|
||||
|
||||
yield ::= expr YIELD_VALUE
|
||||
|
||||
_mklambda ::= load_closure mklambda
|
||||
_mklambda ::= mklambda
|
||||
|
||||
or ::= expr jmp_true expr _come_from
|
||||
or ::= expr JUMP_IF_TRUE_OR_POP expr COME_FROM
|
||||
and ::= expr jmp_false expr _come_from
|
||||
and ::= expr JUMP_IF_FALSE_OR_POP expr COME_FROM
|
||||
and2 ::= _jump jmp_false COME_FROM expr COME_FROM
|
||||
|
||||
expr ::= conditional
|
||||
conditional ::= expr jmp_false expr JUMP_FORWARD expr COME_FROM
|
||||
conditional ::= expr jmp_false expr JUMP_ABSOLUTE expr
|
||||
expr ::= conditionalnot
|
||||
conditionalnot ::= expr jmp_true expr _jump expr COME_FROM
|
||||
|
||||
ret_expr ::= expr
|
||||
ret_expr ::= ret_and
|
||||
ret_expr ::= ret_or
|
||||
|
||||
ret_expr_or_cond ::= ret_expr
|
||||
ret_expr_or_cond ::= ret_cond
|
||||
ret_expr_or_cond ::= ret_cond_not
|
||||
|
||||
ret_and ::= expr JUMP_IF_FALSE_OR_POP ret_expr_or_cond COME_FROM
|
||||
ret_or ::= expr JUMP_IF_TRUE_OR_POP ret_expr_or_cond COME_FROM
|
||||
ret_cond ::= expr POP_JUMP_IF_FALSE expr RETURN_END_IF ret_expr_or_cond
|
||||
ret_cond_not ::= expr POP_JUMP_IF_TRUE expr RETURN_END_IF ret_expr_or_cond
|
||||
|
||||
stmt ::= return_lambda
|
||||
stmt ::= conditional_lambda
|
||||
|
||||
return_lambda ::= ret_expr RETURN_VALUE LAMBDA_MARKER
|
||||
conditional_lambda ::= expr jmp_false return_if_stmt return_stmt LAMBDA_MARKER
|
||||
|
||||
cmp ::= cmp_list
|
||||
cmp ::= compare
|
||||
compare ::= expr expr COMPARE_OP
|
||||
cmp_list ::= expr cmp_list1 ROT_TWO POP_TOP
|
||||
_come_from
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP JUMP_IF_FALSE_OR_POP
|
||||
cmp_list1 COME_FROM
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP jmp_false
|
||||
cmp_list1 _come_from
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP JUMP_IF_FALSE_OR_POP
|
||||
cmp_list2 COME_FROM
|
||||
cmp_list1 ::= expr DUP_TOP ROT_THREE
|
||||
COMPARE_OP jmp_false
|
||||
cmp_list2 _come_from
|
||||
cmp_list2 ::= expr COMPARE_OP JUMP_FORWARD
|
||||
cmp_list2 ::= expr COMPARE_OP RETURN_VALUE
|
||||
mapexpr ::= BUILD_MAP kvlist
|
||||
|
||||
kvlist ::= kvlist kv
|
||||
kvlist ::= kvlist kv2
|
||||
kvlist ::= kvlist kv3
|
||||
kvlist ::=
|
||||
|
||||
kv ::= DUP_TOP expr ROT_TWO expr STORE_SUBSCR
|
||||
kv2 ::= DUP_TOP expr expr ROT_THREE STORE_SUBSCR
|
||||
kv3 ::= expr expr STORE_MAP
|
||||
|
||||
exprlist ::= exprlist expr
|
||||
exprlist ::= expr
|
||||
|
||||
nullexprlist ::=
|
||||
'''
|
||||
|
||||
@staticmethod
|
||||
@@ -653,7 +478,15 @@ class Python3Parser(PythonParser):
|
||||
elif opname == 'LOAD_BUILD_CLASS':
|
||||
self.custom_build_class_rule(opname, i, token, tokens, customize)
|
||||
elif opname_base in ('BUILD_LIST', 'BUILD_TUPLE', 'BUILD_SET'):
|
||||
rule = 'build_list ::= ' + 'expr ' * token.attr + opname
|
||||
v = token.attr
|
||||
rule = ('build_list ::= ' + 'expr1024 ' * int(v//1024) +
|
||||
'expr32 ' * int((v//32)%32) + 'expr '*(v%32) + opname)
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
elif self.version >= 3.5 and opname_base == 'BUILD_MAP':
|
||||
kvlist_n = "kvlist_%s" % token.attr
|
||||
rule = kvlist_n + ' ::= ' + 'expr ' * (token.attr*2)
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
rule = "mapexpr ::= %s %s" % (kvlist_n, opname)
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
elif opname_base in ('UNPACK_TUPLE', 'UNPACK_SEQUENCE'):
|
||||
rule = 'unpack ::= ' + opname + ' designator' * token.attr
|
||||
@@ -693,12 +526,37 @@ class Python3Parser(PythonParser):
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
return
|
||||
|
||||
class Python3ParserSingle(Python3Parser):
|
||||
def p_call_stmt(self, args):
|
||||
'''
|
||||
# single-mode compilation. Eval-mode interactive compilation
|
||||
# drops the last rule.
|
||||
class Python32Parser(Python3Parser):
|
||||
def p_32(self, args):
|
||||
"""
|
||||
# Store locals is only used in Python 3.2
|
||||
designator ::= STORE_LOCALS
|
||||
"""
|
||||
|
||||
call_stmt ::= expr POP_TOP
|
||||
call_stmt ::= expr PRINT_EXPR
|
||||
'''
|
||||
class Python3ParserSingle(Python3Parser, PythonParserSingle):
|
||||
pass
|
||||
|
||||
class Python32ParserSingle(Python32Parser, PythonParserSingle):
|
||||
pass
|
||||
|
||||
class Python35onParser(Python3Parser):
|
||||
def p_35on(self, args):
|
||||
"""
|
||||
# this optimization is only used in Python 3.5 and beyond
|
||||
_ifstmts_jump ::= c_stmts_opt
|
||||
|
||||
# Python 3.5+ has WITH_CLEANUP_START/FINISH
|
||||
withstmt ::= expr SETUP_WITH with_setup suite_stmts_opt
|
||||
POP_BLOCK LOAD_CONST COME_FROM
|
||||
WITH_CLEANUP_START WITH_CLEANUP_FINISH END_FINALLY
|
||||
|
||||
withasstmt ::= expr SETUP_WITH designator suite_stmts_opt
|
||||
POP_BLOCK LOAD_CONST COME_FROM
|
||||
WITH_CLEANUP_START WITH_CLEANUP_FINISH END_FINALLY
|
||||
|
||||
# Python 3.5+ classes seem to end with this:
|
||||
stmt ::= LOAD_CLOSURE RETURN_VALUE RETURN_LAST
|
||||
"""
|
||||
|
||||
class Python35onParserSingle(Python35onParser, PythonParserSingle):
|
||||
pass
|
||||
|
uncompyle6/scanner.py

@@ -33,7 +33,7 @@ from uncompyle6.opcodes import (opcode_25, opcode_26, opcode_27,
                                opcode_32, opcode_33, opcode_34, opcode_35)


class Code:
class Code(object):
    '''
    Class for representing code-objects.
399 uncompyle6/scanners/dis35.py (new file)
@@ -0,0 +1,399 @@
|
||||
# This is take from the python 3.5 dis module
|
||||
"""Disassembler of Python byte code into mnemonics."""
|
||||
|
||||
from dis import findlinestarts
|
||||
import types
|
||||
import collections
|
||||
import io
|
||||
|
||||
# This part is modified for cross Python compatability
|
||||
from uncompyle6.opcodes.opcode_35 import *
|
||||
from uncompyle6.opcodes.opcode_35 import opname
|
||||
|
||||
_have_code = (types.MethodType, types.FunctionType, types.CodeType, type)
|
||||
|
||||
def _try_compile(source, name):
|
||||
"""Attempts to compile the given source, first as an expression and
|
||||
then as a statement if the first approach fails.
|
||||
|
||||
Utility function to accept strings in functions that otherwise
|
||||
expect code objects
|
||||
"""
|
||||
try:
|
||||
c = compile(source, name, 'eval')
|
||||
except SyntaxError:
|
||||
c = compile(source, name, 'exec')
|
||||
return c
|
||||
|
||||
def dis(x=None):
|
||||
"""Disassemble classes, methods, functions, generators, or code.
|
||||
"""
|
||||
if x is None:
|
||||
distb()
|
||||
return
|
||||
if hasattr(x, '__func__'): # Method
|
||||
x = x.__func__
|
||||
if hasattr(x, '__code__'): # Function
|
||||
x = x.__code__
|
||||
if hasattr(x, 'gi_code'): # Generator
|
||||
x = x.gi_code
|
||||
if hasattr(x, '__dict__'): # Class or module
|
||||
items = sorted(x.__dict__.items())
|
||||
for name, x1 in items:
|
||||
if isinstance(x1, _have_code):
|
||||
print("Disassembly of %s:" % name, file)
|
||||
try:
|
||||
dis(x1, file)
|
||||
except TypeError as msg:
|
||||
print("Sorry:", msg)
|
||||
print(file)
|
||||
elif isinstance(x, (bytes, bytearray)): # Raw bytecode
|
||||
_disassemble_bytes(x, file)
|
||||
else:
|
||||
raise TypeError("don't know how to disassemble %s objects" %
|
||||
type(x).__name__)
|
||||
|
||||
# The inspect module interrogates this dictionary to build its
|
||||
# list of CO_* constants. It is also used by pretty_flags to
|
||||
# turn the co_flags field into a human readable list.
|
||||
COMPILER_FLAG_NAMES = {
|
||||
1: "OPTIMIZED",
|
||||
2: "NEWLOCALS",
|
||||
4: "VARARGS",
|
||||
8: "VARKEYWORDS",
|
||||
16: "NESTED",
|
||||
32: "GENERATOR",
|
||||
64: "NOFREE",
|
||||
128: "COROUTINE",
|
||||
256: "ITERABLE_COROUTINE",
|
||||
}
|
||||
|
||||
def pretty_flags(flags):
|
||||
"""Return pretty representation of code flags."""
|
||||
names = []
|
||||
for i in range(32):
|
||||
flag = 1<<i
|
||||
if flags & flag:
|
||||
names.append(COMPILER_FLAG_NAMES.get(flag, hex(flag)))
|
||||
flags ^= flag
|
||||
if not flags:
|
||||
break
|
||||
else:
|
||||
names.append(hex(flags))
|
||||
return ", ".join(names)
|
||||
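A worked example of pretty_flags using the table above: a simple function's co_flags is typically 0x43 = OPTIMIZED (1) + NEWLOCALS (2) + NOFREE (64), so the loop collects those three names. A quick check, assuming the pretty_flags defined above is in scope:

def square(x):
    return x * x

flags = square.__code__.co_flags        # typically 0x43 for a plain function
print(hex(flags), pretty_flags(flags))  # e.g. 0x43 OPTIMIZED, NEWLOCALS, NOFREE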
|
||||
def _get_code_object(x):
|
||||
"""Helper to handle methods, functions, generators, strings and raw code objects"""
|
||||
if hasattr(x, '__func__'): # Method
|
||||
x = x.__func__
|
||||
if hasattr(x, '__code__'): # Function
|
||||
x = x.__code__
|
||||
if hasattr(x, 'gi_code'): # Generator
|
||||
x = x.gi_code
|
||||
if isinstance(x, str): # Source code
|
||||
x = _try_compile(x, "<disassembly>")
|
||||
if hasattr(x, 'co_code'): # Code object
|
||||
return x
|
||||
raise TypeError("don't know how to disassemble %s objects" %
|
||||
type(x).__name__)
|
||||
|
||||
def code_info(x):
|
||||
"""Formatted details of methods, functions, or code."""
|
||||
return _format_code_info(_get_code_object(x))
|
||||
|
||||
def _format_code_info(co):
|
||||
lines = []
|
||||
lines.append("Name: %s" % co.co_name)
|
||||
lines.append("Filename: %s" % co.co_filename)
|
||||
lines.append("Argument count: %s" % co.co_argcount)
|
||||
lines.append("Kw-only arguments: %s" % co.co_kwonlyargcount)
|
||||
lines.append("Number of locals: %s" % co.co_nlocals)
|
||||
lines.append("Stack size: %s" % co.co_stacksize)
|
||||
lines.append("Flags: %s" % pretty_flags(co.co_flags))
|
||||
if co.co_consts:
|
||||
lines.append("Constants:")
|
||||
for i_c in enumerate(co.co_consts):
|
||||
lines.append("%4d: %r" % i_c)
|
||||
if co.co_names:
|
||||
lines.append("Names:")
|
||||
for i_n in enumerate(co.co_names):
|
||||
lines.append("%4d: %s" % i_n)
|
||||
if co.co_varnames:
|
||||
lines.append("Variable names:")
|
||||
for i_n in enumerate(co.co_varnames):
|
||||
lines.append("%4d: %s" % i_n)
|
||||
if co.co_freevars:
|
||||
lines.append("Free variables:")
|
||||
for i_n in enumerate(co.co_freevars):
|
||||
lines.append("%4d: %s" % i_n)
|
||||
if co.co_cellvars:
|
||||
lines.append("Cell variables:")
|
||||
for i_n in enumerate(co.co_cellvars):
|
||||
lines.append("%4d: %s" % i_n)
|
||||
return "\n".join(lines)
|
||||
|
||||
def show_code(co):
|
||||
"""Print details of methods, functions, or code to *file*.
|
||||
|
||||
If *file* is not provided, the output is printed on stdout.
|
||||
"""
|
||||
print(code_info(co))
|
||||
|
||||
_Instruction = collections.namedtuple("_Instruction",
     "opname opcode arg argval argrepr offset starts_line is_jump_target")

class Instruction(_Instruction):
    """Details for a bytecode operation

       Defined fields:
         opname - human readable name for operation
         opcode - numeric code for operation
         arg - numeric argument to operation (if any), otherwise None
         argval - resolved arg value (if known), otherwise same as arg
         argrepr - human readable description of operation argument
         offset - start index of operation within bytecode sequence
         starts_line - line started by this opcode (if any), otherwise None
         is_jump_target - True if other code jumps to here, otherwise False
    """

    def _disassemble(self, lineno_width=3, mark_as_current=False):
        """Format instruction details for inclusion in disassembly output

        *lineno_width* sets the width of the line number field (0 omits it)
        *mark_as_current* inserts a '-->' marker arrow as part of the line
        """
        fields = []
        # Column: Source code line number
        if lineno_width:
            if self.starts_line is not None:
                lineno_fmt = "%%%dd" % lineno_width
                fields.append(lineno_fmt % self.starts_line)
            else:
                fields.append(' ' * lineno_width)
        # Column: Current instruction indicator
        if mark_as_current:
            fields.append('-->')
        else:
            fields.append(' ')
        # Column: Jump target marker
        if self.is_jump_target:
            fields.append('>>')
        else:
            fields.append(' ')
        # Column: Instruction offset from start of code sequence
        fields.append(repr(self.offset).rjust(4))
        # Column: Opcode name
        fields.append(self.opname.ljust(20))
        # Column: Opcode argument
        if self.arg is not None:
            fields.append(repr(self.arg).rjust(5))
            # Column: Opcode argument details
            if self.argrepr:
                fields.append('(' + self.argrepr + ')')
        return ' '.join(fields).rstrip()


def get_instructions(x, first_line=None):
    """Iterator for the opcodes in methods, functions or code

    Generates a series of Instruction named tuples giving the details of
    each operation in the supplied code.

    If *first_line* is not None, it indicates the line number that should
    be reported for the first source line in the disassembled code.
    Otherwise, the source line information (if any) is taken directly from
    the disassembled code object.
    """
    co = _get_code_object(x)
    cell_names = co.co_cellvars + co.co_freevars
    linestarts = dict(findlinestarts(co))
    if first_line is not None:
        line_offset = first_line - co.co_firstlineno
    else:
        line_offset = 0
    return _get_instructions_bytes(co.co_code, co.co_varnames, co.co_names,
                                   co.co_consts, cell_names, linestarts,
                                   line_offset)

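For orientation, iterating the generator looks roughly like this (a sketch; the offsets and opcodes in the comment are what CPython 3.4/3.5 typically emits and may vary):

    def add(a, b):
        return a + b

    for inst in get_instructions(add):
        print(inst.offset, inst.opname, inst.argrepr)
    # 0 LOAD_FAST a
    # 3 LOAD_FAST b
    # 6 BINARY_ADD
    # 7 RETURN_VALUE
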
def _get_const_info(const_index, const_list):
    """Helper to get optional details about const references

       Returns the dereferenced constant and its repr if the constant
       list is defined.
       Otherwise returns the constant index and its repr().
    """
    argval = const_index
    if const_list is not None:
        argval = const_list[const_index]
    return argval, repr(argval)

def _get_name_info(name_index, name_list):
    """Helper to get optional details about named references

       Returns the dereferenced name as both value and repr if the name
       list is defined.
       Otherwise returns the name index and its repr().
    """
    argval = name_index
    if name_list is not None:
        argval = name_list[name_index]
        argrepr = argval
    else:
        argrepr = repr(argval)
    return argval, argrepr


def _get_instructions_bytes(code, varnames=None, names=None, constants=None,
                            cells=None, linestarts=None, line_offset=0):
    """Iterate over the instructions in a bytecode string.

    Generates a sequence of Instruction namedtuples giving the details of each
    opcode. Additional information about the code's runtime environment
    (e.g. variable names, constants) can be specified using optional
    arguments.

    """
    labels = findlabels(code)
    extended_arg = 0
    starts_line = None
    # enumerate() is not an option, since we sometimes process
    # multiple elements on a single pass through the loop
    n = len(code)
    i = 0
    while i < n:
        op = code[i]
        if isinstance(op, str):
            op_num = ord(op)
        else:
            op_num = op

        offset = i
        if linestarts is not None:
            starts_line = linestarts.get(i, None)
            if starts_line is not None:
                starts_line += line_offset
        is_jump_target = i in labels
        i = i+1
        arg = None
        argval = None
        argrepr = ''
        if op >= HAVE_ARGUMENT:
            if isinstance(code[i], str):
                arg = op_num + ord(code[i+1])*256 + extended_arg
            else:
                arg = code[i] + code[i+1]*256 + extended_arg
            extended_arg = 0
            i = i+2
            if op == EXTENDED_ARG:
                extended_arg = arg*65536
            # Set argval to the dereferenced value of the argument when
            # available, and argrepr to the string representation of argval.
            # _disassemble_bytes needs the string repr of the
            # raw name index for LOAD_GLOBAL, LOAD_CONST, etc.
            argval = arg
            if op in hasconst:
                argval, argrepr = _get_const_info(arg, constants)
            elif op in hasname:
                argval, argrepr = _get_name_info(arg, names)
            elif op in hasjrel:
                argval = i + arg
                argrepr = "to " + repr(argval)
            elif op in haslocal:
                argval, argrepr = _get_name_info(arg, varnames)
            elif op in hascompare:
                argval = cmp_op[arg]
                argrepr = argval
            elif op in hasfree:
                argval, argrepr = _get_name_info(arg, cells)
            elif op in hasnargs:
                argrepr = "%d positional, %d keyword pair" % (code[i-2], code[i-1])
        yield Instruction(opname[op_num], op,
                          arg, argval, argrepr,
                          offset, starts_line, is_jump_target)

def findlabels(code):
    """Detect all offsets in a byte code which are jump targets.

    Return the list of offsets.

    """
    labels = []
    # enumerate() is not an option, since we sometimes process
    # multiple elements on a single pass through the loop
    n = len(code)
    i = 0
    while i < n:
        op = code[i]
        i = i+1
        if op >= HAVE_ARGUMENT:
            arg = code[i] + code[i+1]*256
            i = i+2
            label = -1
            if op in hasjrel:
                label = i+arg
            elif op in hasjabs:
                label = arg
            if label >= 0:
                if label not in labels:
                    labels.append(label)
    return labels

class Bytecode:
    """The bytecode operations of a piece of code

    Instantiate this with a function, method, string of code, or a code object
    (as returned by compile()).

    Iterating over this yields the bytecode operations as Instruction instances.
    """
    def __init__(self, x, first_line=None, current_offset=None):
        self.codeobj = co = _get_code_object(x)
        if first_line is None:
            self.first_line = co.co_firstlineno
            self._line_offset = 0
        else:
            self.first_line = first_line
            self._line_offset = first_line - co.co_firstlineno
        self._cell_names = co.co_cellvars + co.co_freevars
        self._linestarts = dict(findlinestarts(co))
        self._original_object = x
        self.current_offset = current_offset

    def __iter__(self):
        co = self.codeobj
        return _get_instructions_bytes(co.co_code, co.co_varnames, co.co_names,
                                       co.co_consts, self._cell_names,
                                       self._linestarts,
                                       line_offset=self._line_offset)

    def __repr__(self):
        return "{}({!r})".format(self.__class__.__name__,
                                 self._original_object)

    @classmethod
    def from_traceback(cls, tb):
        """ Construct a Bytecode from the given traceback """
        while tb.tb_next:
            tb = tb.tb_next
        return cls(tb.tb_frame.f_code, current_offset=tb.tb_lasti)

    def info(self):
        """Return formatted information about the code object."""
        return _format_code_info(self.codeobj)

    def dis(self):
        """Return a formatted view of the bytecode operations."""
        co = self.codeobj
        if self.current_offset is not None:
            offset = self.current_offset
        else:
            offset = -1
        with io.StringIO() as output:
            _disassemble_bytes(co.co_code, varnames=co.co_varnames,
                               names=co.co_names, constants=co.co_consts,
                               cells=self._cell_names,
                               linestarts=self._linestarts,
                               line_offset=self._line_offset,
                               file=output,
                               lasti=offset)
            return output.getvalue()

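Sketch of the intended call pattern for this portable Bytecode class (it mirrors the stdlib dis.Bytecode API; bc.dis() relies on a _disassemble_bytes helper defined elsewhere in the module, outside this excerpt):

    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    bc = Bytecode(fib)                 # or Bytecode.from_traceback(tb) after an exception
    print(bc.info())                   # same report as code_info()
    print(bc.dis())                    # stdlib-style disassembly listing
    ops = [i.opname for i in bc]       # iterate Instruction namedtuples
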
@@ -214,12 +214,6 @@ class Scanner27(scan.Scanner):
                rv.append(Token(replace[offset], oparg, pattr, offset, linestart))
        return rv, customize

    def op_size(self, op):
        if op < self.opc.HAVE_ARGUMENT:
            return 1
        else:
            return 3

    def build_stmt_indices(self):
        code = self.code
        start = 0
@@ -300,15 +294,6 @@ class Scanner27(scan.Scanner):
                i = s
                slist += [end] * (end-len(slist))

    def remove_mid_line_ifs(self, ifs):
        filtered = []
        for i in ifs:
            if self.lines[i].l_no == self.lines[i+3].l_no:
                if self.code[self.prev[self.lines[i].next]] in (PJIT, PJIF):
                    continue
            filtered.append(i)
        return filtered

    def next_except_jump(self, start):
        '''
        Return the next jump that was generated by an except SomeException:
@@ -555,6 +555,33 @@ class Scanner3(scan.Scanner):
        else:
            self.fixed_jumps[offset] = self.restrict_to_parent(target, parent)

    def next_except_jump(self, start):
        """
        Return the next jump that was generated by an except SomeException:
        construct in a try...except...else clause or None if not found.
        """

        if self.code[start] == DUP_TOP:
            except_match = self.first_instr(start, len(self.code), POP_JUMP_IF_FALSE)
            if except_match:
                jmp = self.prev_op[self.get_target(except_match)]
                self.ignore_if.add(except_match)
                self.not_continue.add(jmp)
                return jmp

        count_END_FINALLY = 0
        count_SETUP_ = 0
        for i in self.op_range(start, len(self.code)):
            op = self.code[i]
            if op == END_FINALLY:
                if count_END_FINALLY == count_SETUP_:
                    assert self.code[self.prev_op[i]] in (JUMP_ABSOLUTE, JUMP_FORWARD, RETURN_VALUE)
                    self.not_continue.add(self.prev_op[i])
                    return self.prev_op[i]
                count_END_FINALLY += 1
            elif op in (SETUP_EXCEPT, SETUP_WITH, SETUP_FINALLY):
                count_SETUP_ += 1

    def rem_or(self, start, end, instr, target=None, include_beyond_target=False):
        """
        Find offsets of all requested <instr> between <start> and <end>,
@@ -298,33 +298,6 @@ class Scanner34(scan3.Scanner3):
        else:
            self.fixed_jumps[offset] = self.restrict_to_parent(target, parent)

    def next_except_jump(self, start):
        """
        Return the next jump that was generated by an except SomeException:
        construct in a try...except...else clause or None if not found.
        """

        if self.code[start] == DUP_TOP:
            except_match = self.first_instr(start, len(self.code), POP_JUMP_IF_FALSE)
            if except_match:
                jmp = self.prev_op[self.get_target(except_match)]
                self.ignore_if.add(except_match)
                self.not_continue.add(jmp)
                return jmp

        count_END_FINALLY = 0
        count_SETUP_ = 0
        for i in self.op_range(start, len(self.code)):
            op = self.code[i]
            if op == END_FINALLY:
                if count_END_FINALLY == count_SETUP_:
                    assert self.code[self.prev_op[i]] in (JUMP_ABSOLUTE, JUMP_FORWARD, RETURN_VALUE)
                    self.not_continue.add(self.prev_op[i])
                    return self.prev_op[i]
                count_END_FINALLY += 1
            elif op in (SETUP_EXCEPT, SETUP_WITH, SETUP_FINALLY):
                count_SETUP_ += 1

if __name__ == "__main__":
    co = inspect.currentframe().f_code
    tokens, customize = Scanner34(3.4).disassemble(co)
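The identical next_except_jump bodies above are the point of the "Don't repeat next_except_jump" change: the method now lives once in Scanner3 and the per-version scanners inherit it. Roughly, the resulting layout is (a sketch, not lines from the diff):

    class Scanner3(scan.Scanner):
        def next_except_jump(self, start):
            ...                        # shared implementation shown above

    class Scanner34(Scanner3):         # no per-version copy needed any more
        ...

    class Scanner35(Scanner3):
        ...
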
@@ -10,17 +10,14 @@ for later use in deparsing.

from __future__ import print_function

import dis, inspect
import inspect
from array import array
import uncompyle6.scanners.scanner3 as scan3
import uncompyle6.scanners.dis35 as dis35

from uncompyle6 import PYTHON_VERSION
from uncompyle6.code import iscode
from uncompyle6.scanner import Token

# Get all the opcodes into globals
globals().update(dis.opmap)

import uncompyle6.opcodes.opcode_35
# verify uses JUMP_OPs from here
JUMP_OPs = uncompyle6.opcodes.opcode_35.JUMP_OPs
@@ -29,13 +26,10 @@ from uncompyle6.opcodes.opcode_35 import *

class Scanner35(scan3.Scanner3):

    def disassemble(self, co, classname=None, code_objects={}):
        fn = self.disassemble_built_in if PYTHON_VERSION == 3.4 \
            else self.disassemble_generic
        return fn(co, classname, code_objects=code_objects)

    def disassemble_built_in(self, co, classname=None,
                             code_objects={}):
        # Note: we can't use built-in disassembly routines, unless
        # we do post-processing like we do here.
    def disassemble(self, co, classname=None,
                    code_objects={}):
        # Container for tokens
        tokens = []
        customize = {}
@@ -46,7 +40,7 @@ class Scanner35(scan3.Scanner3):
        # Get jump targets
        # Format: {target offset: [jump offsets]}
        jump_targets = self.find_jump_targets()
        bytecode = dis.Bytecode(co)
        bytecode = dis35.Bytecode(co)

        # self.lines contains (block,addrLastInstr)
        if classname:
@@ -116,17 +110,11 @@ class Scanner35(scan3.Scanner3):
                pattr = const
                pass
            elif opname in ('BUILD_LIST', 'BUILD_TUPLE', 'BUILD_SET', 'BUILD_SLICE',
                            'BUILD_MAP',
                            'UNPACK_SEQUENCE',
                            'MAKE_FUNCTION', 'MAKE_CLOSURE',
                            'DUP_TOPX', 'RAISE_VARARGS'
                            ):
                # if opname == 'BUILD_TUPLE' and \
                #    self.code[self.prev[offset]] == LOAD_CLOSURE:
                #     continue
                # else:
                #     op_name = '%s_%d' % (op_name, oparg)
                #     if opname != BUILD_SLICE:
                #     customize[op_name] = oparg
                opname = '%s_%d' % (opname, inst.argval)
                if inst.opname != 'BUILD_SLICE':
                    customize[opname] = inst.argval
@@ -137,7 +125,7 @@ class Scanner35(scan3.Scanner3):
            if target < inst.offset:
                if (inst.offset in self.stmts and
                    self.code[inst.offset+3] not in (END_FINALLY, POP_BLOCK)
                    and offset not in self.not_continue):
                    and inst.offset not in self.not_continue):
                    opname = 'CONTINUE'
                else:
                    opname = 'JUMP_BACK'
@@ -298,33 +286,6 @@ class Scanner35(scan3.Scanner3):
        else:
            self.fixed_jumps[offset] = self.restrict_to_parent(target, parent)

    def next_except_jump(self, start):
        """
        Return the next jump that was generated by an except SomeException:
        construct in a try...except...else clause or None if not found.
        """

        if self.code[start] == DUP_TOP:
            except_match = self.first_instr(start, len(self.code), POP_JUMP_IF_FALSE)
            if except_match:
                jmp = self.prev_op[self.get_target(except_match)]
                self.ignore_if.add(except_match)
                self.not_continue.add(jmp)
                return jmp

        count_END_FINALLY = 0
        count_SETUP_ = 0
        for i in self.op_range(start, len(self.code)):
            op = self.code[i]
            if op == END_FINALLY:
                if count_END_FINALLY == count_SETUP_:
                    assert self.code[self.prev_op[i]] in (JUMP_ABSOLUTE, JUMP_FORWARD, RETURN_VALUE)
                    self.not_continue.add(self.prev_op[i])
                    return self.prev_op[i]
                count_END_FINALLY += 1
            elif op in (SETUP_EXCEPT, SETUP_WITH, SETUP_FINALLY):
                count_SETUP_ += 1

if __name__ == "__main__":
    co = inspect.currentframe().f_code
    tokens, customize = Scanner35(3.5).disassemble(co)
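The __main__ stanza doubles as a smoke test: run under Python 3.5 it disassembles the scanner's own module code. The same check can be done interactively along these lines (a sketch; the exact token stream depends on the interpreter):

    import inspect
    from uncompyle6.scanners.scanner35 import Scanner35

    co = inspect.currentframe().f_code
    tokens, customize = Scanner35(3.5).disassemble(co)
    for t in tokens:
        print(t)
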
@@ -973,13 +973,29 @@ class FragmentsWalker(pysource.SourceWalker, object):
        else:
            raise RuntimeError('Internal Error: n_build_list expects list or tuple')

        flat_elems = []
        for elem in node:
            if elem == 'expr1024':
                for subelem in elem:
                    for subsubelem in subelem:
                        flat_elems.append(subsubelem)
            elif elem == 'expr32':
                for subelem in elem:
                    flat_elems.append(subelem)
            else:
                flat_elems.append(elem)

        self.indentMore(INDENT_PER_LEVEL)
        if len(node) > 3:
            line_separator = ',\n' + self.indent
        else:
            line_separator = ', '
        sep = INDENT_PER_LEVEL[:-1]
        for elem in node:

        # FIXME:
        # if flat_elems > some_number, then group
        # do automatic wrapping
        for elem in flat_elems:
            if (elem == 'ROT_THREE'):
                continue

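The expr32/expr1024 grammar rules batch long list, tuple and set displays into groups of 32 (and 32x32) expressions; the loop above flattens those groups back into one element list before anything is written out. The shape of the operation, with plain nested lists standing in for the AST nodes (a self-contained sketch, not walker code):

    def flatten(groups):
        flat = []
        for elem in groups:
            if isinstance(elem, list) and elem and isinstance(elem[0], list):
                for sub in elem:            # an "expr1024": a list of 32-element groups
                    flat.extend(sub)
            elif isinstance(elem, list):    # an "expr32": one flat group
                flat.extend(elem)
            else:
                flat.append(elem)           # an ordinary expr
        return flat

    flatten([['a', 'b'], 'c'])              # ['a', 'b', 'c']
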
@@ -1,4 +1,4 @@
# Copyright (c) 2015 by Rocky Bernstein
# Copyright (c) 2015, 2016 by Rocky Bernstein
# Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
# Copyright (c) 1999 John Aycock
@@ -1148,29 +1148,42 @@ class SourceWalker(GenericASTTraversal, object):
        """
        p = self.prec
        self.prec = 100
        assert node[-1] == 'kvlist'
        node = node[-1] # goto kvlist

        self.indentMore(INDENT_PER_LEVEL)
        line_seperator = ',\n' + self.indent
        sep = INDENT_PER_LEVEL[:-1]
        self.write('{')
        for kv in node:
            assert kv in ('kv', 'kv2', 'kv3')
            # kv ::= DUP_TOP expr ROT_TWO expr STORE_SUBSCR
            # kv2 ::= DUP_TOP expr expr ROT_THREE STORE_SUBSCR
            # kv3 ::= expr expr STORE_MAP
            if kv == 'kv':
                name = self.traverse(kv[-2], indent='')
                value = self.traverse(kv[1], indent=self.indent+(len(name)+2)*' ')
            elif kv == 'kv2':
                name = self.traverse(kv[1], indent='')
                value = self.traverse(kv[-3], indent=self.indent+(len(name)+2)*' ')
            elif kv == 'kv3':
                name = self.traverse(kv[-2], indent='')
                value = self.traverse(kv[0], indent=self.indent+(len(name)+2)*' ')
            self.write(sep, name, ': ', value)
            sep = line_seperator

        if node[0].type.startswith('kvlist'):
            # Python 3.5 style key/value list in mapexpr
            l = list(node[0])
            i = 0
            while i < len(l):
                name = self.traverse(l[i], indent='')
                value = self.traverse(l[i+1], indent=self.indent+(len(name)+2)*' ')
                self.write(sep, name, ': ', value)
                sep = line_seperator
                i += 2
        else:
            assert node[-1] == 'kvlist'
            node = node[-1] # goto kvlist

            for kv in node:
                assert kv in ('kv', 'kv2', 'kv3')
                # kv ::= DUP_TOP expr ROT_TWO expr STORE_SUBSCR
                # kv2 ::= DUP_TOP expr expr ROT_THREE STORE_SUBSCR
                # kv3 ::= expr expr STORE_MAP
                if kv == 'kv':
                    name = self.traverse(kv[-2], indent='')
                    value = self.traverse(kv[1], indent=self.indent+(len(name)+2)*' ')
                elif kv == 'kv2':
                    name = self.traverse(kv[1], indent='')
                    value = self.traverse(kv[-3], indent=self.indent+(len(name)+2)*' ')
                elif kv == 'kv3':
                    name = self.traverse(kv[-2], indent='')
                    value = self.traverse(kv[0], indent=self.indent+(len(name)+2)*' ')
                self.write(sep, name, ': ', value)
                sep = line_seperator
        self.write('}')
        self.indentLess(INDENT_PER_LEVEL)
        self.prec = p
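The new branch handles Python 3.5, where the compiler hands the deparser the keys and values of a dict display as one flat kvlist rather than kv/kv2/kv3 subtrees, so the walker consumes the list two entries at a time. The pairing itself is just this (a standalone sketch with strings in place of AST nodes):

    kvlist = ['"a"', '1', '"b"', '2']       # key, value, key, value, ...
    pairs = [(kvlist[i], kvlist[i + 1]) for i in range(0, len(kvlist), 2)]
    print('{' + ', '.join('%s: %s' % kv for kv in pairs) + '}')
    # {"a": 1, "b": 2}
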
@@ -1182,33 +1195,49 @@ class SourceWalker(GenericASTTraversal, object):
        """
        p = self.prec
        self.prec = 100
        lastnode = node.pop().type
        if lastnode.startswith('BUILD_LIST'):
        lastnode = node.pop()
        lastnodetype = lastnode.type
        if lastnodetype.startswith('BUILD_LIST'):
            self.write('['); endchar = ']'
        elif lastnode.startswith('BUILD_TUPLE'):
        elif lastnodetype.startswith('BUILD_TUPLE'):
            self.write('('); endchar = ')'
        elif lastnode.startswith('BUILD_SET'):
        elif lastnodetype.startswith('BUILD_SET'):
            self.write('{'); endchar = '}'
        elif lastnode.startswith('ROT_TWO'):
        elif lastnodetype.startswith('ROT_TWO'):
            self.write('('); endchar = ')'
        else:
            raise 'Internal Error: n_build_list expects list or tuple'

        flat_elems = []
        for elem in node:
            if elem == 'expr1024':
                for subelem in elem:
                    for subsubelem in subelem:
                        flat_elems.append(subsubelem)
            elif elem == 'expr32':
                for subelem in elem:
                    flat_elems.append(subelem)
            else:
                flat_elems.append(elem)

        self.indentMore(INDENT_PER_LEVEL)
        if len(node) > 3:
        if lastnode.attr > 3:
            line_separator = ',\n' + self.indent
        else:
            line_separator = ', '
        sep = INDENT_PER_LEVEL[:-1]
        for elem in node:
            if (elem == 'ROT_THREE'):
                continue

        # FIXME:
        # if flat_elems > some_number, then group
        # do automatic wrapping
        for elem in flat_elems:
            if elem == 'ROT_THREE':
                continue
            assert elem == 'expr'
            value = self.traverse(elem)
            self.write(sep, value)
            sep = line_separator
        if len(node) == 1 and lastnode.startswith('BUILD_TUPLE'):
        if lastnode.attr == 1 and lastnodetype.startswith('BUILD_TUPLE'):
            self.write(',')
        self.write(endchar)
        self.indentLess(INDENT_PER_LEVEL)
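Switching from len(node) to lastnode.attr matters because the parse-tree length and the element count diverge once expr32/expr1024 grouping kicks in; the BUILD_* opcode's argument is always the real number of items, and it also decides the trailing comma for one-element tuples. Illustratively (constant folding aside, a one-element tuple built from a variable shows the count directly):

    import dis

    def f(a):
        return (a,)

    dis.dis(f)      # ... BUILD_TUPLE 1 ...  -- attr == 1, so the deparser emits '(a,)'
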
@@ -1529,8 +1558,13 @@ class SourceWalker(GenericASTTraversal, object):
        code._tokens = None # save memory
        assert ast == 'stmts'

        if ast[0][0] == NAME_MODULE:
            if self.hide_internal: del ast[0]
        try:
            if ast[0][0] == NAME_MODULE:
                if self.hide_internal: del ast[0]
            elif ast[1][0] == NAME_MODULE:
                if self.hide_internal: del ast[1]
                pass
        except:
            pass

        qualname = '.'.join(self.classes)
@@ -1550,9 +1584,9 @@ class SourceWalker(GenericASTTraversal, object):
        if (ast[0][0] == ASSIGN_DOC_STRING(code.co_consts[0])):
            i = 0
            do_doc = True
        elif (len(ast) > 2 and 3.0 <= self.version <= 3.2 and
              ast[2][0] == ASSIGN_DOC_STRING(code.co_consts[0])):
            i = 2
        elif (len(ast) > 1 and 3.0 <= self.version <= 3.2 and
              ast[1][0] == ASSIGN_DOC_STRING(code.co_consts[0])):
            i = 1
            do_doc = True
        if do_doc and self.hide_internal:
            self.print_docstring(indent, code.co_consts[0])
3 uncompyle6/version.py Normal file
@@ -0,0 +1,3 @@
# This file is suitable for sourcing inside bash as
# well as importing into Python
VERSION='2.3.4'
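The comment is meant literally: the file is a single NAME='value' assignment, so shell scripts can `source uncompyle6/version.py` from the repository root and read $VERSION, while Python code imports it as usual. For example:

    from uncompyle6.version import VERSION
    print(VERSION)      # 2.3.4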