Mirror of https://github.com/rocky/python-uncompyle6.git
Merge branch 'master' of github.com:rocky/python-uncompyle6
Makefile (2 lines changed)
@@ -33,7 +33,7 @@ check-2.7 check-3.3 check-3.4: pytest
 #: Tests for Python 3.2 and 3.5 - pytest doesn't work here
 # Or rather 3.5 doesn't work not on Travis
-check-3.2 check-3.5:
+check-3.2 check-3.5 check-3.6:
 	$(MAKE) -C test $@
 
 #:Tests for Python 2.6 (doesn't have pytest)
@@ -11,7 +11,7 @@ Introduction
 ------------
 
 *uncompyle6* translates Python bytecode back into equivalent Python
-source code. It accepts bytecodes from Python version 2.3 to 3.5 or
+source code. It accepts bytecodes from Python version 2.3 to 3.6 or
 so, including PyPy bytecode.
 
 Why this?
@@ -45,7 +45,7 @@ Requirements
 
 This project requires Python 2.6 or later, PyPy 3-2.4, or PyPy-5.0.1.
 The bytecode files it can read has been tested on Python bytecodes from
-versions 2.3-2.7, and 3.2-3.5 and the above-mentioned PyPy versions.
+versions 2.3-2.7, and 3.2-3.6 and the above-mentioned PyPy versions.
 
 Installation
 ------------
@@ -37,7 +37,7 @@ entry_points={
 ]}
 ftp_url = None
 install_requires = ['spark-parser >= 1.4.0',
-                    'xdis >= 2.0.3']
+                    'xdis >= 2.1.0']
 license = 'MIT'
 mailing_list = 'python-debugger@googlegroups.com'
 modname = 'uncompyle6'
@@ -1,2 +1,2 @@
 spark-parser >= 1.2.1
-xdis >= 2.0.3
+xdis >= 2.1.0
@@ -38,6 +38,10 @@ check-3.4: check-bytecode check-3.4-ok check-2.7-ok
 check-3.5: check-bytecode
 	$(PYTHON) test_pythonlib.py --bytecode-3.5 --verify $(COMPILE)
 
+#: Run working tests from Python 3.6
+check-3.6: check-bytecode
+	$(PYTHON) test_pythonlib.py --bytecode-3.6 --verify $(COMPILE)
+
 #: Check deparsing only, but from a different Python version
 check-disasm:
 	$(PYTHON) dis-compare.py
@@ -50,7 +54,7 @@ check-bytecode-2:
 #: Check deparsing bytecode 3.x only
 check-bytecode-3:
 	$(PYTHON) test_pythonlib.py --bytecode-3.2 --bytecode-3.3 \
-	--bytecode-3.4 --bytecode-3.5 --bytecode-pypy3.2
+	--bytecode-3.4 --bytecode-3.5 --bytecode-3.6 --bytecode-pypy3.2
 
 #: Check deparsing bytecode that works running Python 2 and Python 3
 check-bytecode: check-bytecode-3
@@ -93,6 +97,10 @@ check-bytecode-3.4:
 check-bytecode-3.5:
 	$(PYTHON) test_pythonlib.py --bytecode-3.5
 
+#: Check deparsing Python 3.6
+check-bytecode-3.6:
+	$(PYTHON) test_pythonlib.py --bytecode-3.6
+
 #: short tests for bytecodes only for this version of Python
 check-native-short:
 	$(PYTHON) test_pythonlib.py --bytecode-$(PYTHON_VERSION) --verify $(COMPILE)
test/bytecode_3.6/01_fstring.pyc (new binary file, not shown)
test/simple_source/bug36/01_fstring.py (new file, 3 lines)
@@ -0,0 +1,3 @@
+var1 = 'x'
+var2 = 'y'
+print(f'interpolate {var1} strings {var2} py36')
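The new test exercises Python 3.6 f-string syntax. To see the opcodes such a file compiles to (FORMAT_VALUE and friends, which the grammar changes further down match), you can disassemble a small snippet with the standard dis module under Python 3.6 or later. The snippet below is illustrative only and is not part of the commit:

    import dis

    # Must run under Python 3.6+, where f-strings and FORMAT_VALUE exist.
    code = compile("var1 = 'x'\nprint(f'interpolate {var1} py36')\n", '<demo>', 'exec')
    dis.dis(code)  # look for FORMAT_VALUE in the output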
@@ -72,13 +72,13 @@ test_options = {
                  PYOC, 'base_2.7', 2.7),
 }
 
-for vers in (2.7, 3.4, 3.5):
+for vers in (2.7, 3.4, 3.5, 3.6):
     pythonlib = "ok_lib%s" % vers
     key = "ok-%s" % vers
     test_options[key] = (os.path.join(src_dir, pythonlib), PYOC, key, vers)
     pass
 
-for vers in (2.3, 2.4, 2.5, 2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 'pypy3.2', 'pypy2.7'):
+for vers in (2.3, 2.4, 2.5, 2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, 'pypy3.2', 'pypy2.7'):
     bytecode = "bytecode_%s" % vers
     key = "bytecode-%s" % vers
     test_options[key] = (bytecode, PYC, bytecode, vers)
@@ -64,8 +64,8 @@ def usage():
 
 
 def main_bin():
-    if not (sys.version_info[0:2] in ((2, 6), (2, 7), (3, 2), (3, 3), (3, 4), (3, 5))):
-        print('Error: %s requires Python 2.6, 2.7, 3.2, 3.3, 3.4 or 3.5' % program,
+    if not (sys.version_info[0:2] in ((2, 6), (2, 7), (3, 2), (3, 3), (3, 4), (3, 5), (3, 6))):
+        print('Error: %s requires Python 2.6, 2.7, 3.2, 3.3, 3.4, 3.5, or 3.6' % program,
               file=sys.stderr)
         sys.exit(-1)
 
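The guard above simply tests the running interpreter's (major, minor) tuple against the supported versions, now including (3, 6). A quick illustrative check, not part of the commit:

    import sys

    supported = ((2, 6), (2, 7), (3, 2), (3, 3), (3, 4), (3, 5), (3, 6))
    print(sys.version_info[0:2], sys.version_info[0:2] in supported)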
@@ -365,7 +365,7 @@ class Python3Parser(PythonParser):
         call_function ::= expr {expr}^n CALL_FUNCTION_KW_n POP_TOP
 
         classdefdeco2 ::= LOAD_BUILD_CLASS mkfunc {expr}^n-1 CALL_FUNCTION_n
         """
         # Low byte indicates number of positional paramters,
         # high byte number of positional parameters
         args_pos = token.attr & 0xff
@@ -444,7 +444,8 @@ class Python3Parser(PythonParser):
         For PYPY:
         load_attr ::= expr LOOKUP_METHOD
         call_function ::= expr CALL_METHOD
         """
+        saw_format_value = False
         for i, token in enumerate(tokens):
             opname = token.type
             opname_base = opname[:opname.rfind('_')]
@@ -457,8 +458,19 @@ class Python3Parser(PythonParser):
                 assign2_pypy ::= expr expr designator designator
                 """, nop_func)
                 continue
+            elif opname == 'FORMAT_VALUE':
+                # Python 3.6+
+                self.addRule("""
+                    formatted_value ::= LOAD_FAST FORMAT_VALUE
+                    formatted_value ::= LOAD_NAME FORMAT_VALUE
+                    str ::= LOAD_CONST
+                    formatted_value_or_str ::= formatted_value
+                    formatted_value_or_str ::= str
+                    """, nop_func)
+                saw_format_value = True
+
             elif opname in ('CALL_FUNCTION', 'CALL_FUNCTION_VAR',
                             'CALL_FUNCTION_VAR_KW', 'CALL_FUNCTION_KW'):
                 self.custom_classfunc_rule(opname, token, customize)
             elif opname == 'LOAD_DICTCOMP':
                 rule_pat = ("dictcomp ::= LOAD_DICTCOMP %sMAKE_FUNCTION_0 expr "
@@ -479,6 +491,15 @@ class Python3Parser(PythonParser):
             if opname_base == 'BUILD_TUPLE':
                 rule = ('load_closure ::= %s%s' % (('LOAD_CLOSURE ' * v), opname))
                 self.add_unique_rule(rule, opname, token.attr, customize)
+            if opname_base == 'BUILD_LIST' and saw_format_value:
+                format_or_str_n = "formatted_value_or_str_%s" % v
+                self.addRule("""
+                    expr ::= joined_str
+                    joined_str ::= LOAD_CONST LOAD_ATTR %s CALL_FUNCTION_1
+                    %s ::= %s%s
+                    """ % (format_or_str_n, format_or_str_n, ("formatted_value_or_str " *v), opname),
+                    nop_func)
+
             elif opname == 'LOOKUP_METHOD':
                 # A PyPy speciality - DRY with parse2
                 self.add_unique_rule("load_attr ::= expr LOOKUP_METHOD",
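The joined_str rule added above (LOAD_CONST, LOAD_ATTR, a BUILD_LIST of formatted pieces, CALL_FUNCTION_1) has the shape of a str.join call, which appears to be how the 3.6 bytecode targeted here spells an f-string. In source terms the pattern corresponds to roughly the following; this is only an illustration of that shape, not code from the commit:

    # ''.join([...]) over formatted pieces -- the pattern the joined_str rule matches.
    var1, var2 = 'x', 'y'
    s = ''.join(['interpolate ', format(var1), ' strings ', format(var2), ' py36'])
    assert s == 'interpolate x strings y py36'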
uncompyle6/parsers/parse36.py (new file, 52 lines)
@@ -0,0 +1,52 @@
+# Copyright (c) 2016 Rocky Bernstein
+"""
+spark grammar differences over Python 3.5 for Python 3.6.
+"""
+from __future__ import print_function
+
+from uncompyle6.parser import PythonParserSingle
+from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
+from uncompyle6.parsers.parse35 import Python35Parser
+
+class Python36Parser(Python35Parser):
+
+    def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
+        super(Python36Parser, self).__init__(debug_parser)
+        self.customized = {}
+
+    def p_36misc(self, args):
+        """
+        formatted_value ::= LOAD_FAST FORMAT_VALUE
+        str ::= LOAD_CONST
+        joined_str ::= LOAD_CONST LOAD_ATTR format_value_or_strs
+                       BUILD_LIST CALL_FUNCTION
+        format_value_or_strs ::= format_value_or_strs format_value_or_str
+        format_value_or_strs ::= format_value_or_str
+        format_value_or_str ::= format_value
+        format_value_or_str ::= str
+        """
+
+class Python36ParserSingle(Python36Parser, PythonParserSingle):
+    pass
+
+if __name__ == '__main__':
+    # Check grammar
+    p = Python36Parser()
+    p.checkGrammar()
+    from uncompyle6 import PYTHON_VERSION, IS_PYPY
+    if PYTHON_VERSION == 3.6:
+        lhs, rhs, tokens, right_recursive = p.checkSets()
+        from uncompyle6.scanner import get_scanner
+        s = get_scanner(PYTHON_VERSION, IS_PYPY)
+        opcode_set = set(s.opc.opname).union(set(
+            """JUMP_BACK CONTINUE RETURN_END_IF COME_FROM
+               LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP LOAD_CLASSNAME
+               LAMBDA_MARKER RETURN_LAST
+            """.split()))
+        remain_tokens = set(tokens) - opcode_set
+        import re
+        remain_tokens = set([re.sub('_\d+$','', t) for t in remain_tokens])
+        remain_tokens = set([re.sub('_CONT$','', t) for t in remain_tokens])
+        remain_tokens = set(remain_tokens) - opcode_set
+        print(remain_tokens)
+        # print(sorted(p.rule2name.items()))
@@ -22,7 +22,7 @@ from uncompyle6 import PYTHON3, IS_PYPY
 from uncompyle6.scanners.tok import Token
 
 # The byte code versions we support
-PYTHON_VERSIONS = (2.3, 2.4, 2.5, 2.6, 2.7, 3.2, 3.3, 3.4, 3.5)
+PYTHON_VERSIONS = (2.3, 2.4, 2.5, 2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6)
 
 # FIXME: DRY
 if PYTHON3:
uncompyle6/scanners/scanner36.py (new file, 35 lines)
@@ -0,0 +1,35 @@
+# Copyright (c) 2016 by Rocky Bernstein
+"""
+Python 3.5 bytecode scanner/deparser
+
+This sets up opcodes Python's 3.5 and calls a generalized
+scanner routine for Python 3.
+"""
+
+from __future__ import print_function
+
+from uncompyle6.scanners.scanner3 import Scanner3
+
+# bytecode verification, verify(), uses JUMP_OPs from here
+from xdis.opcodes import opcode_36 as opc
+JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)
+
+class Scanner36(Scanner3):
+
+    def __init__(self, show_asm=None):
+        Scanner3.__init__(self, 3.6, show_asm)
+        return
+    pass
+
+if __name__ == "__main__":
+    from uncompyle6 import PYTHON_VERSION
+    if PYTHON_VERSION == 3.6:
+        import inspect
+        co = inspect.currentframe().f_code
+        tokens, customize = Scanner36().disassemble(co)
+        for t in tokens:
+            print(t.format())
+        pass
+    else:
+        print("Need to be Python 3.6 to demo; I am %s." %
+              PYTHON_VERSION)
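Elsewhere in this commit (verify.py, aligner.py) scanners are obtained through uncompyle6.scanner.get_scanner rather than instantiated directly. A minimal sketch of driving the new scanner that way, assuming get_scanner dispatches on 3.6 once PYTHON_VERSIONS includes it:

    from uncompyle6.scanner import get_scanner

    def tokens_for_36(code_object):
        # Assumes get_scanner() hands back a Scanner36 for version 3.6.
        scanner = get_scanner(3.6, is_pypy=False)
        tokens, customize = scanner.disassemble(code_object)
        return tokens, customize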
@@ -2,7 +2,7 @@
 # Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
 # Copyright (c) 1999 John Aycock
 
-import sys
+import re, sys
 from uncompyle6 import PYTHON3
 
 if PYTHON3:
@@ -71,8 +71,10 @@ class Token:
             elif self.op in self.opc.hascompare:
                 if isinstance(self.attr, int):
                     pattr = self.opc.cmp_op[self.attr]
                 # And so on. See xdis/bytecode.py get_instructions_bytes
                 pass
+            elif re.search('_\d+$', self.type):
+                return "%s%s%s" % (prefix, offset_opname, argstr)
             else:
                 pattr = ''
         return "%s%s%s %r" % (prefix, offset_opname, argstr, pattr)
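The new elif branch in Token.format keys off token types ending in an underscore and a count, the naming scheme used for customized tokens elsewhere in this diff (CALL_FUNCTION_1, MAKE_FUNCTION_0, and so on). A tiny illustration of what the added regular expression matches:

    import re

    for name in ('CALL_FUNCTION_1', 'MAKE_FUNCTION_0', 'LOAD_CONST'):
        print(name, bool(re.search(r'_\d+$', name)))
    # CALL_FUNCTION_1 and MAKE_FUNCTION_0 match; LOAD_CONST does not.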
uncompyle6/semantics/aligner.py (new file, 147 lines)
@@ -0,0 +1,147 @@
+import sys
+from uncompyle6.semantics.pysource import (
+    SourceWalker, SourceWalkerError, find_globals, ASSIGN_DOC_STRING, RETURN_NONE)
+from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
+class AligningWalker(SourceWalker, object):
+    def __init__(self, version, scanner, out, showast=False,
+                 debug_parser=PARSER_DEFAULT_DEBUG,
+                 compile_mode='exec', is_pypy=False):
+        SourceWalker.__init__(self, version, out, scanner, showast, debug_parser,
+                              compile_mode, is_pypy)
+        self.desired_line_number = 0
+        self.current_line_number = 0
+
+    def println(self, *data):
+        if data and not(len(data) == 1 and data[0] ==''):
+            self.write(*data)
+
+        self.pending_newlines = max(self.pending_newlines, 1)
+
+    def write(self, *data):
+        from trepan.api import debug; debug()
+        if (len(data) == 1) and data[0] == self.indent:
+            diff = max(self.pending_newlines,
+                       self.desired_line_number - self.current_line_number)
+            self.f.write('\n'*diff)
+            self.current_line_number += diff
+            self.pending_newlines = 0
+        if (len(data) == 0) or (len(data) == 1 and data[0] == ''):
+            return
+
+        out = ''.join((str(j) for j in data))
+        n = 0
+        for i in out:
+            if i == '\n':
+                n += 1
+                if n == len(out):
+                    self.pending_newlines = max(self.pending_newlines, n)
+                    return
+            elif n:
+                self.pending_newlines = max(self.pending_newlines, n)
+                out = out[n:]
+                break
+            else:
+                break
+
+        if self.pending_newlines > 0:
+            diff = max(self.pending_newlines,
+                       self.desired_line_number - self.current_line_number)
+            self.f.write('\n'*diff)
+            self.current_line_number += diff
+            self.pending_newlines = 0
+
+        for i in out[::-1]:
+            if i == '\n':
+                self.pending_newlines += 1
+            else:
+                break
+
+        if self.pending_newlines:
+            out = out[:-self.pending_newlines]
+        self.f.write(out)
+
+    def default(self, node):
+        mapping = self._get_mapping(node)
+        if hasattr(node, 'linestart'):
+            if node.linestart:
+                self.desired_line_number = node.linestart
+        table = mapping[0]
+        key = node
+
+        for i in mapping[1:]:
+            key = key[i]
+            pass
+
+        if key.type in table:
+            self.engine(table[key.type], node)
+            self.prune()
+
+from xdis.code import iscode
+from uncompyle6.scanner import get_scanner
+from uncompyle6.show import (
+    maybe_show_asm,
+)
+
+def align_deparse_code(version, co, out=sys.stderr, showasm=False, showast=False,
+                       showgrammar=False, code_objects={}, compile_mode='exec', is_pypy=False):
+    """
+    disassembles and deparses a given code block 'co'
+    """
+
+    assert iscode(co)
+    # store final output stream for case of error
+    scanner = get_scanner(version, is_pypy=is_pypy)
+
+    tokens, customize = scanner.disassemble(co, code_objects=code_objects)
+    maybe_show_asm(showasm, tokens)
+
+    debug_parser = dict(PARSER_DEFAULT_DEBUG)
+    if showgrammar:
+        debug_parser['reduce'] = showgrammar
+        debug_parser['errorstack'] = True
+
+    # Build AST from disassembly.
+    deparsed = AligningWalker(version, scanner, out, showast=showast,
+                              debug_parser=debug_parser, compile_mode=compile_mode,
+                              is_pypy = is_pypy)
+
+    isTopLevel = co.co_name == '<module>'
+    deparsed.ast = deparsed.build_ast(tokens, customize, isTopLevel=isTopLevel)
+
+    assert deparsed.ast == 'stmts', 'Should have parsed grammar start'
+
+    del tokens # save memory
+
+    deparsed.mod_globs = find_globals(deparsed.ast, set())
+
+    # convert leading '__doc__ = "..." into doc string
+    try:
+        if deparsed.ast[0][0] == ASSIGN_DOC_STRING(co.co_consts[0]):
+            deparsed.print_docstring('', co.co_consts[0])
+            del deparsed.ast[0]
+        if deparsed.ast[-1] == RETURN_NONE:
+            deparsed.ast.pop() # remove last node
+            # todo: if empty, add 'pass'
+    except:
+        pass
+
+    # What we've been waiting for: Generate source from AST!
+    deparsed.gen_source(deparsed.ast, co.co_name, customize)
+
+    for g in deparsed.mod_globs:
+        deparsed.write('# global %s ## Warning: Unused global' % g)
+
+    if deparsed.ERROR:
+        raise SourceWalkerError("Deparsing stopped due to parse error")
+    return deparsed
+
+if __name__ == '__main__':
+    def deparse_test(co):
+        "This is a docstring"
+        sys_version = sys.version_info.major + (sys.version_info.minor / 10.0)
+        # deparsed = deparse_code(sys_version, co, showasm=True, showast=True)
+        deparsed = align_deparse_code(sys_version, co, showasm=False, showast=False,
+                                      showgrammar=False)
+        print(deparsed.text)
+        return
+    deparse_test(deparse_test.__code__)
@@ -586,6 +586,14 @@ class SourceWalker(GenericASTTraversal, object):
             TABLE_DIRECT.update({
                 'LOAD_CLASSDEREF': ( '%{pattr}', ),
             })
+            if version >= 3.6:
+                ########################
+                # Python 3.6+ Additions
+                #######################
+                TABLE_DIRECT.update({
+                    'formatted_value': ( '{%c}', 0),
+                    'joined_str': ( "f'%c'", 2),
+                })
         return
 
     f = property(lambda s: s.params['f'],
@@ -836,6 +844,10 @@ class SourceWalker(GenericASTTraversal, object):
         self.prec += 1
         self.prune()
 
+    def n_str(self, node):
+        self.write(node[0].pattr)
+        self.prune()
+
     def n_LOAD_CONST(self, node):
         data = node.pattr; datatype = type(data)
         if isinstance(datatype, int) and data == minint:
@@ -215,6 +215,9 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2, name=''):
     elif version == 3.5:
         import uncompyle6.scanners.scanner35 as scan
         scanner = scan.Scanner35()
+    elif version == 3.6:
+        import uncompyle6.scanners.scanner36 as scan
+        scanner = scan.Scanner36()
 
     global JUMP_OPs
    JUMP_OPs = list(scan.JUMP_OPs) + ['JUMP_BACK']