python-uncompyle6 (mirror of https://github.com/rocky/python-uncompyle6.git)

Compare commits: release-py... ... release-py... (19 commits)
Commits (SHA1): 914369bd36, d08d183fc8, 0b3d6b8add, 5cb46c2ed3, 163e47fb49,
0cf32f1b70, fcc4aff62c, a6e2074606, f0f9676f52, be610aa6b3, 1494bb2049,
44382ec78e, d62dc3daac, 592aba9dd8, 5ad51707e3, f28c255804, 315965300f,
9bd85fe5a0, c6e3168c31
@@ -1,11 +1,3 @@
-# This configuration was automatically generated from a CircleCI 1.0 config.
-# It should include any build commands you had along with commands that CircleCI
-# inferred from your project structure. We strongly recommend you read all the
-# comments in this file to understand the structure of CircleCI 2.0, as the idiom
-# for configuration has changed substantially in 2.0 to allow arbitrary jobs rather
-# than the prescribed lifecycle of 1.0. In general, we recommend using this generated
-# configuration as a reference rather than using it in production, though in most
-# cases it should duplicate the execution of your original 1.0 config.
 version: 2
 jobs:
   build:
.gitignore (vendored, 1 line changed)

@@ -6,6 +6,7 @@
 /.eggs
 /.hypothesis
 /.idea
 /.mypy_cache
 /.pytest_cache
+/.python-version
 /.tox
NEWS.md (19 lines added)

@@ -1,3 +1,22 @@
+4.1.0 2019-10-12 Stony Brook Ride
+=================================
+
+- Fix fragment bugs
+  * missing RETURN_LAST introduced when adding transformation layer
+  * more parent entries on tokens
+- Preliminary support for decompiling Python 1.0, 1.1, 1.2 and 1.6
+  * Newer xdis version needed
+
+3.4.1 2019-10-02
+================
+
+- Correct assert{,2} transforms. Fixes #289
+- Fragment parsing fixes:
+  * Wasn't handling 3-arg %p
+  * fielding error in code_deparse()
+- Use newer xdis to better track Python 3.8.0
+
+
 3.4.0 2019-08-24 Totoro
 =======================
 
README.rst (11 lines changed)

@@ -1,4 +1,4 @@
-|buildstatus| |Latest Version| |Supported Python Versions|
+|buildstatus| |Pypi Installs| |Latest Version| |Supported Python Versions|
 
 |packagestatus|
 
@@ -13,7 +13,7 @@ Introduction
 ------------
 
 *uncompyle6* translates Python bytecode back into equivalent Python
-source code. It accepts bytecodes from Python version 1.3 to version
+source code. It accepts bytecodes from Python version 1.0 to version
 3.8, spanning over 24 years of Python releases. We include Dropbox's
 Python 2.5 bytecode and some PyPy bytecode.
 
@@ -88,9 +88,9 @@ This uses setup.py, so it follows the standard Python routine:
 
 ::
 
-    pip install -e .    # set up to run from source tree
-                        # Or if you want to install instead
-    python setup.py install # may need sudo
+    $ pip install -e .    # set up to run from source tree
+                          # Or if you want to install instead
+    $ python setup.py install # may need sudo
 
 A GNU makefile is also provided so :code:`make install` (possibly as root or
 sudo) will do the steps above.
@@ -244,3 +244,4 @@ See Also
 .. |Supported Python Versions| image:: https://img.shields.io/pypi/pyversions/uncompyle6.svg
 .. |Latest Version| image:: https://badge.fury.io/py/uncompyle6.svg
    :target: https://badge.fury.io/py/uncompyle6
+.. |Pypi Installs| image:: https://pepy.tech/badge/uncompyle6/month
@@ -58,7 +58,7 @@ entry_points = {
 ]}
 ftp_url = None
 install_requires = ["spark-parser >= 1.8.9, < 1.9.0",
-                    "xdis >= 4.0.3, < 4.1.0"]
+                    "xdis >= 4.1.0, < 4.2.0"]
 
 license = "GPL3"
 mailing_list = "python-debugger@googlegroups.com"
@@ -5,4 +5,4 @@ if [[ $0 == ${BASH_SOURCE[0]} ]] ; then
     echo "This script should be *sourced* rather than run directly through bash"
     exit 1
 fi
-export PYVERSIONS='3.6.8 3.7.3 2.6.9 3.3.7 2.7.16 3.2.6 3.1.5 3.4.8'
+export PYVERSIONS='3.6.9 3.7.4 2.6.9 3.3.7 2.7.16 3.2.6 3.1.5 3.4.10 3.5.7'
@@ -1,6 +1,6 @@
 [bdist_rpm]
 release = 1
-packager = Mysterie <kajusska@gmail.com>
+packager = rocky <rb@dustyfeet.com>
 doc_files = README
 # CHANGES.txt
 # USAGE.txt
@@ -8,4 +8,4 @@ doc_files = README
 # examples/
 
 [bdist_wheel]
-universal=1
+# universal=1
@@ -1,5 +1,6 @@
 PHONY=check clean dist distclean test test-unit test-functional rmChangeLog clean_pyc nosetests \
-   check-bytecode-1 check-bytecode-1.3 check-bytecode-1.4 check-bytecode-1.5 \
+   check-bytecode-1.0 check-bytecode-1.1 check-bytecode-1.2 check-bytecode-1.3 \
+   check-bytecode-1 check-bytecode-1.4 check-bytecode-1.5 check-bytecode-1.6 \
    check-bytecode-2 check-bytecode-3 check-bytecode-3-short \
    check-bytecode-2.2 check-byteocde-2.3 check-bytecode-2.4 \
    check-short check-2.6 check-2.7 check-3.0 check-3.1 check-3.2 check-3.3 \
@@ -85,7 +86,7 @@ check-disasm:
	$(PYTHON) dis-compare.py
 
 #: Check deparsing bytecode 1.x only
-check-bytecode-1: check-bytecode-1.4 check-bytecode-1.5
+check-bytecode-1: check-bytecode-1.0 check-bytecode-1.1 check-bytecode-1.2 check-bytecode-1.3 check-bytecode-1.4 check-bytecode-1.5 check-bytecode-1.6
 
 #: Check deparsing bytecode 2.x only
 check-bytecode-2:
@@ -109,6 +110,7 @@ check-bytecode-3-short:
 #: Check deparsing bytecode on all Python 2 and Python 3 versions
 check-bytecode: check-bytecode-3
	$(PYTHON) test_pythonlib.py \
+	   --bytecode-1.0 --bytecode-1.1 --bytecode-1.2 \
	   --bytecode-1.3 --bytecode-1.4 --bytecode-1.5 \
-	   --bytecode-2.2 --bytecode-2.3 --bytecode-2.4 \
+	   --bytecode-2.1 --bytecode-2.2 --bytecode-2.3 --bytecode-2.4 \
@@ -122,6 +124,18 @@ check-bytecode-short: check-bytecode-3-short
	   --bytecode-2.6 --bytecode-2.7 --bytecode-pypy2.7
 
 
+#: Check deparsing bytecode 1.0 only
+check-bytecode-1.0:
+	$(PYTHON) test_pythonlib.py --bytecode-1.0
+
+#: Check deparsing bytecode 1.1 only
+check-bytecode-1.1:
+	$(PYTHON) test_pythonlib.py --bytecode-1.1
+
+#: Check deparsing bytecode 1.2 only
+check-bytecode-1.2:
+	$(PYTHON) test_pythonlib.py --bytecode-1.2
+
 #: Check deparsing bytecode 1.3 only
 check-bytecode-1.3:
	$(PYTHON) test_pythonlib.py --bytecode-1.3
@@ -134,6 +148,10 @@ check-bytecode-1.4:
 check-bytecode-1.5:
	$(PYTHON) test_pythonlib.py --bytecode-1.5
 
+#: Check deparsing bytecode 1.6 only
+check-bytecode-1.6:
+	$(PYTHON) test_pythonlib.py --bytecode-1.6
+
 #: Check deparsing Python 2.1
 check-bytecode-2.1:
	$(PYTHON) test_pythonlib.py --bytecode-2.1
@@ -2,22 +2,23 @@
 """ Trivial helper program to bytecompile and run an uncompile
 """
 import os, sys, py_compile
 
 assert len(sys.argv) >= 2
 version = sys.version[0:3]
-if sys.argv[1] == '--run':
-    suffix = '_run'
+if sys.argv[1] in ("--run", "-r"):
+    suffix = "_run"
     py_source = sys.argv[2:]
 else:
-    suffix = ''
+    suffix = ""
     py_source = sys.argv[1:]
 
 for path in py_source:
     short = os.path.basename(path)
-    if hasattr(sys, 'pypy_version_info'):
-        cfile = "bytecode_pypy%s%s/%s" % (version, suffix, short) + 'c'
+    if hasattr(sys, "pypy_version_info"):
+        cfile = "bytecode_pypy%s%s/%s" % (version, suffix, short) + "c"
     else:
-        cfile = "bytecode_%s%s/%s" % (version, suffix, short) + 'c'
+        cfile = "bytecode_%s%s/%s" % (version, suffix, short) + "c"
     print("byte-compiling %s to %s" % (path, cfile))
     py_compile.compile(path, cfile)
     if isinstance(version, str) or version >= (2, 6, 0):
-        os.system("../bin/uncompyle6 -a -t %s" % cfile)
+        os.system("../bin/uncompyle6 -a -T %s" % cfile)
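For orientation, a minimal sketch of what one iteration of the helper above does for a single file. The input file name and the relative ../bin/uncompyle6 path are assumptions for illustration, and the target directory is assumed to already exist, as it does in the test tree.

    # Sketch only: byte-compile one source file into a version-named directory,
    # then hand the result to uncompyle6 with the flags used in the diff above.
    import os
    import py_compile
    import sys

    version = sys.version[0:3]                      # e.g. "3.7"
    src = "01_assert2.py"                           # hypothetical input file
    cfile = "bytecode_%s_run/%sc" % (version, src)  # e.g. bytecode_3.7_run/01_assert2.pyc
    py_compile.compile(src, cfile)
    os.system("../bin/uncompyle6 -a -T %s" % cfile)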
New binary test files (contents not shown):
  test/bytecode_1.0/simple_const.pyc
  test/bytecode_1.0/unpack_assign.pyc
  test/bytecode_1.1/simple_const.pyc
  test/bytecode_1.2/simple_const.pyc
  test/bytecode_1.3/simple_const.pyc
  test/bytecode_1.6/simple_const.pyc
  test/bytecode_3.7_run/01_assert2.pyc
test/simple_source/bug37/01_assert2.py (new file, 8 lines)

@@ -0,0 +1,8 @@
+# Self-checking test.
+# Bug was in if transform not inverting expression
+# This file is RUNNABLE!
+def test_assert2(c):
+    if c < 2:
+        raise SyntaxError('Oops')
+
+test_assert2(5)
@@ -75,9 +75,13 @@ for vers in (2.7, 3.4, 3.5, 3.6):
     pass
 
 for vers in (
+    1.0,
+    1.1,
+    1.2,
     1.3,
     1.4,
     1.5,
+    1.6,
     2.1,
     2.2,
     2.3,
@@ -626,12 +626,30 @@ def get_python_parser(
 
     if version < 3.0:
         if version < 2.2:
+            if version == 1.0:
+                import uncompyle6.parsers.parse10 as parse10
+                if compile_mode == 'exec':
+                    p = parse10.Python10Parser(debug_parser)
+                else:
+                    p = parse10.Python01ParserSingle(debug_parser)
+            elif version == 1.1:
+                import uncompyle6.parsers.parse11 as parse11
+                if compile_mode == 'exec':
+                    p = parse11.Python11Parser(debug_parser)
+                else:
+                    p = parse11.Python11ParserSingle(debug_parser)
+            if version == 1.2:
+                import uncompyle6.parsers.parse12 as parse12
+                if compile_mode == 'exec':
+                    p = parse12.Python12Parser(debug_parser)
+                else:
+                    p = parse12.Python12ParserSingle(debug_parser)
             if version == 1.3:
                 import uncompyle6.parsers.parse13 as parse13
                 if compile_mode == 'exec':
-                    p = parse13.Python14Parser(debug_parser)
+                    p = parse13.Python13Parser(debug_parser)
                 else:
-                    p = parse13.Python14ParserSingle(debug_parser)
+                    p = parse13.Python13ParserSingle(debug_parser)
             elif version == 1.4:
                 import uncompyle6.parsers.parse14 as parse14
                 if compile_mode == 'exec':
@@ -644,6 +662,12 @@ def get_python_parser(
                     p = parse15.Python15Parser(debug_parser)
                 else:
                     p = parse15.Python15ParserSingle(debug_parser)
+            elif version == 1.6:
+                import uncompyle6.parsers.parse16 as parse16
+                if compile_mode == 'exec':
+                    p = parse16.Python16Parser(debug_parser)
+                else:
+                    p = parse16.Python16ParserSingle(debug_parser)
             elif version == 2.1:
                 import uncompyle6.parsers.parse21 as parse21
                 if compile_mode == 'exec':
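A hedged usage sketch of the dispatch above: asking get_python_parser() for one of the newly wired-in 1.x versions should hand back the matching parser class. The keyword arguments below are taken from how the function is used in this hunk and are not a verified statement of its full signature.

    # Sketch, not a definitive API example.
    from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
    from uncompyle6.parser import get_python_parser

    p = get_python_parser(1.6, debug_parser=PARSER_DEFAULT_DEBUG, compile_mode="exec")
    print(type(p).__name__)  # expected: Python16Parser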
uncompyle6/parsers/parse10.py (new file, 25 lines)

@@ -0,0 +1,25 @@
+# Copyright (c) 2019 Rocky Bernstein
+
+from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
+from uncompyle6.parser import PythonParserSingle
+from uncompyle6.parsers.parse11 import Python11Parser
+
+
+class Python10Parser(Python11Parser):
+    def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
+        super(Python11Parser, self).__init__(debug_parser)
+        self.customized = {}
+
+
+class Python10ParserSingle(Python10Parser, PythonParserSingle):
+    pass
+
+
+if __name__ == "__main__":
+    # Check grammar
+    p = Python10Parser()
+    p.check_grammar()
+    p.dump_grammar()
+
+    # local variables:
+    # tab-width: 4
uncompyle6/parsers/parse11.py (new file, 25 lines)

@@ -0,0 +1,25 @@
+# Copyright (c) 2019 Rocky Bernstein
+
+from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
+from uncompyle6.parser import PythonParserSingle
+from uncompyle6.parsers.parse12 import Python12Parser
+
+
+class Python11Parser(Python12Parser):
+    def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
+        super(Python12Parser, self).__init__(debug_parser)
+        self.customized = {}
+
+
+class Python11ParserSingle(Python11Parser, PythonParserSingle):
+    pass
+
+
+if __name__ == "__main__":
+    # Check grammar
+    p = Python12Parser()
+    p.check_grammar()
+    p.dump_grammar()
+
+    # local variables:
+    # tab-width: 4
uncompyle6/parsers/parse12.py (new file, 25 lines)

@@ -0,0 +1,25 @@
+# Copyright (c) 2019 Rocky Bernstein
+
+from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
+from uncompyle6.parser import PythonParserSingle
+from uncompyle6.parsers.parse13 import Python13Parser
+
+
+class Python12Parser(Python13Parser):
+    def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
+        super(Python12Parser, self).__init__(debug_parser)
+        self.customized = {}
+
+
+class Python12ParserSingle(Python12Parser, PythonParserSingle):
+    pass
+
+
+if __name__ == "__main__":
+    # Check grammar
+    p = Python12Parser()
+    p.check_grammar()
+    p.dump_grammar()
+
+    # local variables:
+    # tab-width: 4
uncompyle6/parsers/parse16.py (new file, 46 lines)

@@ -0,0 +1,46 @@
+# Copyright (c) 2019 Rocky Bernstein
+
+from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
+from uncompyle6.parser import PythonParserSingle, nop_func
+from uncompyle6.parsers.parse21 import Python21Parser
+
+class Python16Parser(Python21Parser):
+
+    def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
+        super(Python16Parser, self).__init__(debug_parser)
+        self.customized = {}
+
+    def p_import16(self, args):
+        """
+        import ::= filler IMPORT_NAME STORE_FAST
+        import ::= filler IMPORT_NAME STORE_NAME
+
+        import_from ::= filler IMPORT_NAME importlist
+        import_from ::= filler filler IMPORT_NAME importlist POP_TOP
+
+        importlist ::= importlist IMPORT_FROM
+        importlist ::= IMPORT_FROM
+        """
+
+    def customize_grammar_rules(self, tokens, customize):
+        super(Python16Parser, self).customize_grammar_rules(tokens, customize)
+        for i, token in enumerate(tokens):
+            opname = token.kind
+            opname_base = opname[:opname.rfind('_')]
+
+            if opname_base == 'UNPACK_LIST':
+                self.addRule("store ::= unpack_list", nop_func)
+
+
+
+class Python16ParserSingle(Python16Parser, PythonParserSingle):
+    pass
+
+if __name__ == '__main__':
+    # Check grammar
+    p = Python15Parser()
+    p.check_grammar()
+    p.dump_grammar()
+
+    # local variables:
+    # tab-width: 4
@@ -39,7 +39,7 @@ else:
 
 # The byte code versions we support.
 # Note: these all have to be floats
-PYTHON_VERSIONS = frozenset((1.3, 1.4, 1.5,
+PYTHON_VERSIONS = frozenset((1.0, 1.1, 1.3, 1.4, 1.5, 1.6,
                              2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 2.7,
                              3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6, 3.7, 3.8))
 
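A small sketch of how a gate against this frozenset might be used; the helper name below is made up for illustration, only the version set is taken from the hunk above.

    PYTHON_VERSIONS = frozenset((1.0, 1.1, 1.3, 1.4, 1.5, 1.6,
                                 2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 2.7,
                                 3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6, 3.7, 3.8))

    def check_supported(version):
        # Hypothetical helper: reject bytecode versions outside the supported set.
        if version not in PYTHON_VERSIONS:
            raise RuntimeError("Unsupported Python bytecode version: %s" % version)

    check_supported(1.6)  # passes after this change; 1.3 and 3.8 pass as well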
uncompyle6/scanners/scanner10.py (new file, 35 lines)

@@ -0,0 +1,35 @@
+# Copyright (c) 2019 by Rocky Bernstein
+"""
+Python 1.0 bytecode decompiler massaging.
+
+This massages tokenized 1.0 bytecode to make it more amenable for
+grammar parsing.
+"""
+
+import uncompyle6.scanners.scanner11 as scan
+
+# bytecode verification, verify(), uses JUMP_OPs from here
+from xdis.opcodes import opcode_10
+
+JUMP_OPS = opcode_10.JUMP_OPS
+
+# We base this off of 1.1 instead of the other way around
+# because we cleaned things up this way.
+# The history is that 2.7 support is the cleanest,
+# then from that we got 2.6 and so on.
+class Scanner10(scan.Scanner11):
+    def __init__(self, show_asm=False):
+        scan.Scanner11.__init__(self, show_asm)
+        self.opc = opcode_10
+        self.opname = opcode_10.opname
+        self.version = 1.0
+        return
+
+    # def ingest(self, co, classname=None, code_objects={}, show_asm=None):
+    #     tokens, customize = self.parent_ingest(co, classname, code_objects, show_asm)
+    #     tokens = [t for t in tokens if t.kind != 'SET_LINENO']
+
+    #     # for t in tokens:
+    #     #     print(t)
+    #
+    #     return tokens, customize
uncompyle6/scanners/scanner11.py (new file, 35 lines)

@@ -0,0 +1,35 @@
+# Copyright (c) 2019 by Rocky Bernstein
+"""
+Python 1.1 bytecode decompiler massaging.
+
+This massages tokenized 1.1 bytecode to make it more amenable for
+grammar parsing.
+"""
+
+import uncompyle6.scanners.scanner13 as scan
+
+# bytecode verification, verify(), uses JUMP_OPs from here
+from xdis.opcodes import opcode_11
+
+JUMP_OPS = opcode_11.JUMP_OPS
+
+# We base this off of 1.2 instead of the other way around
+# because we cleaned things up this way.
+# The history is that 2.7 support is the cleanest,
+# then from that we got 2.6 and so on.
+class Scanner11(scan.Scanner13): # no scanner 1.2
+    def __init__(self, show_asm=False):
+        scan.Scanner13.__init__(self, show_asm)
+        self.opc = opcode_11
+        self.opname = opcode_11.opname
+        self.version = 1.1
+        return
+
+    # def ingest(self, co, classname=None, code_objects={}, show_asm=None):
+    #     tokens, customize = self.parent_ingest(co, classname, code_objects, show_asm)
+    #     tokens = [t for t in tokens if t.kind != 'SET_LINENO']
+
+    #     # for t in tokens:
+    #     #     print(t)
+    #
+    #     return tokens, customize
uncompyle6/scanners/scanner12.py (new file, 36 lines)

@@ -0,0 +1,36 @@
+# Copyright (c) 2019 by Rocky Bernstein
+"""
+Python 1.2 bytecode decompiler massaging.
+
+This massages tokenized 1.2 bytecode to make it more amenable for
+grammar parsing.
+
+"""
+
+import uncompyle6.scanners.scanner13 as scan
+
+# bytecode verification, verify(), uses JUMP_OPs from here
+from xdis.opcodes import opcode_11
+
+JUMP_OPS = opcode_11.JUMP_OPS
+
+# We base this off of 1.3 instead of the other way around
+# because we cleaned things up this way.
+# The history is that 2.7 support is the cleanest,
+# then from that we got 2.6 and so on.
+class Scanner12(scan.Scanner13):
+    def __init__(self, show_asm=False):
+        scan.Scanner14.__init__(self, show_asm)
+        self.opc = opcode_11
+        self.opname = opcode_11.opname
+        self.version = 1.2 # Note: is the same as 1.1 bytecode
+        return
+
+    # def ingest(self, co, classname=None, code_objects={}, show_asm=None):
+    #     tokens, customize = self.parent_ingest(co, classname, code_objects, show_asm)
+    #     tokens = [t for t in tokens if t.kind != 'SET_LINENO']
+
+    #     # for t in tokens:
+    #     #     print(t)
+    #
+    #     return tokens, customize
@@ -7,10 +7,12 @@ grammar parsing.
 """
 
 import uncompyle6.scanners.scanner14 as scan
+
+# from uncompyle6.scanners.scanner26 import ingest as ingest26
 
 # bytecode verification, verify(), uses JUMP_OPs from here
 from xdis.opcodes import opcode_13
 
 JUMP_OPS = opcode_13.JUMP_OPS
 
 # We base this off of 1.4 instead of the other way around
uncompyle6/scanners/scanner16.py (new file, 41 lines)

@@ -0,0 +1,41 @@
+# Copyright (c) 2019 by Rocky Bernstein
+"""
+Python 1.6 bytecode decompiler massaging.
+
+This massages tokenized 1.6 bytecode to make it more amenable for
+grammar parsing.
+"""
+
+import uncompyle6.scanners.scanner21 as scan
+# from uncompyle6.scanners.scanner26 import ingest as ingest26
+
+# bytecode verification, verify(), uses JUMP_OPs from here
+from xdis.opcodes import opcode_16
+JUMP_OPS = opcode_16.JUMP_OPS
+
+# We base this off of 2.2 instead of the other way around
+# because we cleaned things up this way.
+# The history is that 2.7 support is the cleanest,
+# then from that we got 2.6 and so on.
+class Scanner16(scan.Scanner21):
+    def __init__(self, show_asm=False):
+        scan.Scanner21.__init__(self, show_asm)
+        self.opc = opcode_16
+        self.opname = opcode_16.opname
+        self.version = 1.6
+        self.genexpr_name = '<generator expression>'
+        return
+
+    def ingest(self, co, classname=None, code_objects={}, show_asm=None):
+        """
+        Pick out tokens from an uncompyle6 code object, and transform them,
+        returning a list of uncompyle6 Token's.
+
+        The transformations are made to assist the deparsing grammar.
+        """
+        tokens, customize = scan.Scanner21.ingest(self, co, classname, code_objects, show_asm)
+        for t in tokens:
+            if t.op == self.opc.UNPACK_LIST:
+                t.kind = 'UNPACK_LIST_%d' % t.attr
+            pass
+        return tokens, customize
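The ingest() above rewrites generic UNPACK_LIST tokens into arity-specific kinds so grammar rules can key on the element count. A standalone sketch of that renaming idea follows; the Tok class is a made-up stand-in, not uncompyle6's real Token.

    class Tok:
        def __init__(self, kind, attr):
            self.kind = kind  # opcode name as the grammar sees it
            self.attr = attr  # operand; here, the number of items to unpack

    t = Tok("UNPACK_LIST", 3)
    t.kind = "UNPACK_LIST_%d" % t.attr  # same rewrite as in ingest() above
    assert t.kind == "UNPACK_LIST_3"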
@@ -418,7 +418,6 @@ TABLE_DIRECT = {
    'except_suite_finalize': ( '%+%c%-%C', 1, (3, maxint, '') ),
 
    'pass': ( '%|pass\n', ),
-   'STORE_FAST': ( '%{pattr}', ),
    'kv': ( '%c: %c', 3, 1 ),
    'kv2': ( '%c: %c', 1, 2 ),
    'import': ( '%|import %c\n', 2),
@@ -16,7 +16,7 @@
 """Isolate Python version-specific semantic actions here.
 """
 
-from uncompyle6.semantics.consts import TABLE_R, TABLE_DIRECT
+from uncompyle6.semantics.consts import PRECEDENCE, TABLE_R, TABLE_DIRECT
 
 from uncompyle6.parsers.treenode import SyntaxTree
 from uncompyle6.scanners.tok import Token
@@ -27,29 +27,31 @@ def customize_for_version(self, is_pypy, version):
         ########################
         # PyPy changes
         #######################
-        TABLE_DIRECT.update(
-            {
-                "assert2_pypy": ("%|assert %c, %c\n", (1, "assert_expr"), 4),
-                "assert_pypy": ("%|assert %c\n", (1, "assert_expr")),
-                "assign2_pypy": ("%|%c, %c = %c, %c\n", 3, 2, 0, 1),
-                "assign3_pypy": ("%|%c, %c, %c = %c, %c, %c\n", 5, 4, 3, 0, 1, 2),
-                "try_except_pypy": ("%|try:\n%+%c%-%c\n\n", 1, 2),
-                "tryfinallystmt_pypy": ("%|try:\n%+%c%-%|finally:\n%+%c%-\n\n", 1, 3),
-            }
-        )
+        TABLE_DIRECT.update({
+            'assert_pypy': ( '%|assert %c\n' , 1 ),
+            'assert2_pypy': ( '%|assert %c, %c\n' , 1, 4 ),
+            'try_except_pypy': ( '%|try:\n%+%c%-%c\n\n', 1, 2 ),
+            'tryfinallystmt_pypy': ( '%|try:\n%+%c%-%|finally:\n%+%c%-\n\n', 1, 3 ),
+            'assign3_pypy': ( '%|%c, %c, %c = %c, %c, %c\n', 5, 4, 3, 0, 1, 2 ),
+            'assign2_pypy': ( '%|%c, %c = %c, %c\n', 3, 2, 0, 1),
+        })
     else:
         ########################
         # Without PyPy
         #######################
-        TABLE_DIRECT.update(
-            {
-                "assert": ("%|assert %c\n", (0, "assert_expr")),
-                "assert2": ("%|assert %c, %c\n", (0, "assert_expr"), 3),
-                "assign2": ("%|%c, %c = %c, %c\n", 3, 4, 0, 1),
-                "assign3": ("%|%c, %c, %c = %c, %c, %c\n", 5, 6, 7, 0, 1, 2),
-                "try_except": ("%|try:\n%+%c%-%c\n\n", 1, 3),
-            }
-        )
+        TABLE_DIRECT.update({
+            "assert": ("%|assert %c\n", (0, "assert_expr")),
+            "assert2": ("%|assert %c, %c\n", (0, "assert_expr"), 3),
+
+            # Created only via transformation
+            "assertnot": ("%|assert not %p\n", (0, PRECEDENCE['unary_not'])),
+            "assert2not": ( "%|assert not %p, %c\n" ,
+                            (0, PRECEDENCE['unary_not']), 3 ),
+
+            "assign2": ("%|%c, %c = %c, %c\n", 3, 4, 0, 1),
+            "assign3": ("%|%c, %c, %c = %c, %c, %c\n", 5, 6, 7, 0, 1, 2),
+            "try_except": ("%|try:\n%+%c%-%c\n\n", 1, 3),
+        })
     if version >= 3.0:
         if version >= 3.2:
             TABLE_DIRECT.update(
(File diff suppressed because it is too large.)
@@ -1443,7 +1443,7 @@ class SourceWalker(GenericASTTraversal, object):
         list_if = None
         assert n == "comp_iter"
 
-        # find innermost node
+        # find inner-most node
         while n == "comp_iter":
             n = n[0] # recurse one step
             # FIXME: adjust for set comprehension
@@ -2232,7 +2232,10 @@ class SourceWalker(GenericASTTraversal, object):
                 # if lhs is not a UNPACK_TUPLE (or equiv.),
                 # add parenteses to make this a tuple
                 # if node[1][0] not in ('unpack', 'unpack_list'):
-                return "(" + self.traverse(node[1]) + ")"
+                result = self.traverse(node[1])
+                if not (result.startswith("(") and result.endswith(")") ):
+                    result = "(%s)" % result
+                return result
                 # return self.traverse(node[1])
         raise Exception("Can't find tuple parameter " + name)
 
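The replacement above only adds parentheses when the traversed text is not already wrapped. A self-contained sketch of that guard, using a hypothetical traversal result:

    def parenthesize(result):
        # Wrap in parentheses unless the text already starts with "(" and ends with ")",
        # mirroring the check introduced in the hunk above.
        if not (result.startswith("(") and result.endswith(")")):
            result = "(%s)" % result
        return result

    assert parenthesize("a, b") == "(a, b)"
    assert parenthesize("(a, b)") == "(a, b)"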
@@ -88,7 +88,7 @@ class TreeTransform(GenericASTTraversal, object):
         if raise_stmt == "raise_stmt1" and len(testexpr[0]) == 2:
             assert_expr = testexpr[0][0]
             assert_expr.kind = "assert_expr"
-            jmp_true = testexpr[0][1]
+            jump_cond = testexpr[0][1]
             expr = raise_stmt[0]
             RAISE_VARARGS_1 = raise_stmt[1]
             if expr[0] == "call":
@@ -105,15 +105,19 @@ class TreeTransform(GenericASTTraversal, object):
             # 1. RAISE_VARARGS_1
             # becomes:
             # assert2 ::= assert_expr jmp_true LOAD_ASSERT expr RAISE_VARARGS_1 COME_FROM
+            if jump_cond == "jmp_true":
+                kind = "assert2"
+            else:
+                assert jump_cond == "jmp_false"
+                kind = "assert2not"
+
             call = expr[0]
             LOAD_ASSERT = call[0]
             expr = call[1][0]
             node = SyntaxTree(
-                "assert2",
-                [assert_expr, jmp_true, LOAD_ASSERT, expr, RAISE_VARARGS_1]
+                kind,
+                [assert_expr, jump_cond, LOAD_ASSERT, expr, RAISE_VARARGS_1]
             )
             node.transformed_by="n_ifstmt",
 
         else:
             # ifstmt
             # 0. testexpr (2)
@@ -128,12 +132,18 @@ class TreeTransform(GenericASTTraversal, object):
             # 1. RAISE_VARARGS_1
             # becomes:
             # assert ::= assert_expr jmp_true LOAD_ASSERT RAISE_VARARGS_1 COME_FROM
+            if jump_cond == "jmp_true":
+                kind = "assert"
+            else:
+                assert jump_cond == "jmp_false"
+                kind = "assertnot"
+
             LOAD_ASSERT = expr[0]
             node = SyntaxTree(
-                "assert",
-                [assert_expr, jmp_true, LOAD_ASSERT, RAISE_VARARGS_1]
+                kind,
+                [assert_expr, jump_cond, LOAD_ASSERT, RAISE_VARARGS_1]
            )
             node.transformed_by="n_ifstmt",
             pass
         pass
     return node
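To see why the jump sense matters here, an illustration that is not part of the diff: exact bytecode shapes vary by Python version, but the intent of the jmp_true/jmp_false split is roughly the following.

    # Illustrative source patterns only.
    def plain_assert(x):
        # The raise is skipped on a jump-if-true, so the deparser can emit:
        #     assert x, "x must be truthy"
        if not x:
            raise AssertionError("x must be truthy")

    def inverted_assert(x):
        # The raise is skipped on a jump-if-false, so the deparser must invert:
        #     assert not x, "x must be falsy"
        if x:
            raise AssertionError("x must be falsy")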
@@ -12,4 +12,4 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 # This file is suitable for sourcing inside bash as
 # well as importing into Python
-VERSION="3.4.0" # noqa
+VERSION="3.5.0" # noqa