mirror of https://github.com/rocky/python-uncompyle6.git
Compare commits: release-py ... release-py (44 commits)
Commits (SHA1):
9e37495493  77b93c5f21  0b198ee881  9e0c65881d  c796d6a799  3892fb533a
2ea7487ca7  d4f6cec3d0  b1705e283d  eee751e22a  2b0fefb95f  1d7a3c6444
e7778f83f2  b51039ac1e  1a627ba207  f73f0ba41c  114f979555  ea75bcf47e
6c6dcab857  0654aed6c8  7b38d2f1f8  dfbd60231b  8b67f2ccd0  3447ca0767
aadea7224d  1e858efafd  da7421da1c  ce88a72ea1  96ca68a6fe  147b6e1cfe
7725b8e7de  d7b12f4da1  62ddbe320d  c7b9e54e59  a694601264  3003070acb
19d6dedcf5  51ad3fb36e  f017acce21  5bef5683e4  4e1467adc8  7cdf0abb43
9b336251a7  7844456e1e
.gitignore (vendored, 3 lines changed)
@@ -17,4 +17,5 @@
__pycache__
build
/.venv*
/.idea
/.idea
/.hypothesis
ChangeLog (92 lines changed)
@@ -1,3 +1,95 @@
2017-10-10 rocky <rb@dustyfeet.com>

        * uncompyle6/parsers/parse24.py, uncompyle6/scanners/scanner3.py:
        Misc bugs

2017-10-05 rocky <rb@dustyfeet.com>

        * test/simple_source/branching/02_ifelse_lambda.py: One more test

2017-10-05 rocky <rb@dustyfeet.com>

        * .gitignore, pytest/test_grammar.py, uncompyle6/parser.py,
        uncompyle6/parsers/parse26.py, uncompyle6/parsers/parse27.py,
        uncompyle6/parsers/parse3.py, uncompyle6/semantics/consts.py,
        uncompyle6/semantics/pysource.py: Sync with master

2017-10-03 rocky <rb@dustyfeet.com>

        * uncompyle6/parsers/parse2.py, uncompyle6/parsers/parse24.py,
        uncompyle6/parsers/parse26.py: handle newer parser reduction
        behavior

2017-10-03 rocky <rb@dustyfeet.com>

        * uncompyle6/semantics/fragments.py,
        uncompyle6/semantics/pysource.py: Go over table-semantics
        description yet again

2017-10-02 rocky <rb@dustyfeet.com>

        * uncompyle6/parsers/parse2.py, uncompyle6/parsers/parse3.py: Sync
        with master

2017-09-26 rocky <rb@dustyfeet.com>

        * uncompyle6/parsers/parse3.py: Annotation field can be unicode... When deparsing Python 3.x from Python 2.

2017-09-25 rocky <rb@dustyfeet.com>

        * __pkginfo__.py: Require xdis 3.6.0 or greater

2017-09-25 rocky <rb@dustyfeet.com>

        * : commit 0654aed6c823d0bb20abdc866481ca5950db72f7 Author: rocky
        <rb@dustyfeet.com> Date: Thu Sep 21 11:29:17 2017 -0400

2017-09-21 rocky <rb@dustyfeet.com>

        * pytest/test_pysource.py, uncompyle6/semantics/consts.py,
        uncompyle6/semantics/fragments.py, uncompyle6/semantics/pysource.py:
        Unit test for format-specifiers

2017-09-20 rocky <rb@dustyfeet.com>

        * uncompyle6/semantics/fragments.py,
        uncompyle6/semantics/pysource.py: Tidy pysource and fragments

2017-09-20 rocky <rb@dustyfeet.com>

        * uncompyle6/semantics/consts.py: Tidy/regularize table entry
        formatting

2017-09-20 rocky <rb@dustyfeet.com>

        * test/test_pythonlib.py, uncompyle6/semantics/pysource.py: small
        fixes... test_pythonlib.py: it is sys.exit not exit pysource.py: restore node
        type on async_call function

2017-09-20 rocky <rb@dustyfeet.com>

        * pytest/test_pysource.py, uncompyle6/semantics/pysource.py: Start
        pysource unit test

2017-09-17 rocky <rb@dustyfeet.com>

        * uncompyle6/semantics/fragments.py,
        uncompyle6/semantics/pysource.py: emgine -> template_engine

2017-08-31 rocky <rb@dustyfeet.com>

        * : commit 356ea6c7705a557cb3e725d1aca8589dd62b5cdf Author: rocky
        <rb@dustyfeet.com> Date: Thu Aug 31 09:50:48 2017 -0400

2017-08-31 rocky <rb@dustyfeet.com>

        * : commit 4d5843851543bfb3c97fc3c49036f1a971fc1d66 Author: rocky
        <rb@dustyfeet.com> Date: Thu Aug 31 08:53:58 2017 -0400

2017-08-15 rocky <rb@dustyfeet.com>

        * Makefile: 3.7 support

2017-08-15 rocky <rb@dustyfeet.com>

        * : commit c54a47b15f85be50d2278aa79fd514eb08580e65 Author: rocky
NEWS (18 lines changed)
@@ -1,3 +1,21 @@
uncompyle6 2.13.0 2017-10-10

- Fixes in deparsing lambda expressions
- Improve table-semantics descriptions
- Document hacky customize arg count better (until we can remove it)
- Update to use xdis 3.7.0 or greater

uncompyle6 2.12.0 2017-09-26

- Use xdis 3.6.0 or greater now
- Small semantic table cleanups
- Python 3.4's terms a little names better
- Slightly more Python 3.7, but still failing a lot

uncompyle6 2.11.5 2017-08-31

- Skeletal support for Python 3.7

uncompyle6 2.11.4 2017-08-15

* scanner and parser now allow 3-part version string lookups,
@@ -4,7 +4,7 @@ uncompyle6
==========

A native Python cross-version Decompiler and Fragment Decompiler.
Follows in the tradition of decompyle, uncompyle, and uncompyle2.
The successor to decompyle, uncompyle, and uncompyle2.


Introduction
@@ -39,8 +39,8 @@ entry_points = {
    'pydisassemble=uncompyle6.bin.pydisassemble:main',
    ]}
ftp_url = None
install_requires = ['spark-parser >= 1.6.1, < 1.7.0',
                    'xdis >= 3.5.5, < 3.6.0']
install_requires = ['spark-parser >= 1.7.0, < 1.8.0',
                    'xdis >= 3.6.0, < 3.7.0']
license = 'MIT'
mailing_list = 'python-debugger@googlegroups.com'
modname = 'uncompyle6'
@@ -11,15 +11,16 @@ def test_grammar():
    remain_tokens = set([re.sub('_CONT$','', t) for t in remain_tokens])
    remain_tokens = set(remain_tokens) - opcode_set
    assert remain_tokens == set([]), \
        "Remaining tokens %s\n====\n%s" % (remain_tokens, p.dumpGrammar())
        "Remaining tokens %s\n====\n%s" % (remain_tokens, p.dump_grammar())

    p = get_python_parser(PYTHON_VERSION, is_pypy=IS_PYPY)
    lhs, rhs, tokens, right_recursive = p.checkSets()
    lhs, rhs, tokens, right_recursive = p.check_sets()
    expect_lhs = set(['expr1024', 'pos_arg'])
    unused_rhs = set(['build_list', 'call_function', 'mkfunc',
                      'mklambda',
                      'unpack', 'unpack_list'])
    expect_right_recursive = [['designList', ('designator', 'DUP_TOP', 'designList')]]
    expect_right_recursive = frozenset([('designList',
                                         ('designator', 'DUP_TOP', 'designList'))])
    if PYTHON3:
        expect_lhs.add('load_genexpr')

@@ -39,13 +40,14 @@ def test_grammar():
    s = get_scanner(PYTHON_VERSION, IS_PYPY)
    ignore_set = set(
        """
        JUMP_BACK CONTINUE RETURN_END_IF
        JUMP_BACK CONTINUE
        COME_FROM COME_FROM_EXCEPT
        COME_FROM_EXCEPT_CLAUSE
        COME_FROM_LOOP COME_FROM_WITH
        COME_FROM_FINALLY ELSE
        LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP
        LAMBDA_MARKER RETURN_LAST
        LAMBDA_MARKER
        RETURN_END_IF RETURN_END_IF_LAMBDA RETURN_VALUE_LAMBDA RETURN_LAST
        """.split())
    if 2.6 <= PYTHON_VERSION <= 2.7:
        opcode_set = set(s.opc.opname).union(ignore_set)
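The grammar-coverage check above boils down to a set computation: every terminal that appears on a right-hand side of the grammar must either be a real opcode name for the running Python version or belong to the explicit ignore set of pseudo-tokens the scanner injects (the COME_FROM family, LAMBDA_MARKER, and so on). A minimal sketch of that idea, using made-up token names rather than the real parser and scanner objects:

    # Illustrative only: stand-ins for the p.check_sets() and s.opc.opname results.
    grammar_terminals = {'LOAD_CONST', 'RETURN_VALUE', 'COME_FROM', 'LAMBDA_MARKER'}
    real_opcodes = {'LOAD_CONST', 'RETURN_VALUE'}
    ignore_set = {'COME_FROM', 'LAMBDA_MARKER'}

    remain_tokens = grammar_terminals - real_opcodes.union(ignore_set)
    assert remain_tokens == set(), "unknown grammar tokens: %s" % remain_tokens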
pytest/test_pysource.py (new file, 164 lines)
@@ -0,0 +1,164 @@
from uncompyle6 import PYTHON3
from uncompyle6.semantics.consts import (
    escape, NONE,
    # RETURN_NONE, PASS, RETURN_LOCALS
    )

if PYTHON3:
    from io import StringIO
else:
    from StringIO import StringIO

from uncompyle6.semantics.pysource import SourceWalker as SourceWalker

def test_template_engine():
    s = StringIO()
    sw = SourceWalker(2.7, s, None)
    sw.ast = NONE
    sw.template_engine(('--%c--', 0), NONE)
    print(sw.f.getvalue())
    assert sw.f.getvalue() == '--None--'
    # FIXME: and so on...

from uncompyle6.semantics.consts import (
    TABLE_R, TABLE_DIRECT,
    )

from uncompyle6.semantics.fragments import (
    TABLE_DIRECT_FRAGMENT,
    )

skip_for_now = "DELETE_DEREF".split()

def test_tables():
    for t, name, fragment in (
            (TABLE_DIRECT, 'TABLE_DIRECT', False),
            (TABLE_R, 'TABLE_R', False),
            (TABLE_DIRECT_FRAGMENT, 'TABLE_DIRECT_FRAGMENT', True)):
        for k, entry in t.iteritems():
            if k in skip_for_now:
                continue
            fmt = entry[0]
            arg = 1
            i = 0
            m = escape.search(fmt)
            print("%s[%s]" % (name, k))
            while m:
                i = m.end()
                typ = m.group('type') or '{'
                if typ in frozenset(['%', '+', '-', '|', ',', '{']):
                    # No args
                    pass
                elif typ in frozenset(['c', 'p', 'P', 'C', 'D']):
                    # One arg - should be int or tuple of int
                    if typ == 'c':
                        assert isinstance(entry[arg], int), (
                            "%s[%s][%d] type %s is '%s' should be an int but is %s. "
                            "Full entry: %s" %
                            (name, k, arg, typ, entry[arg], type(entry[arg]), entry)
                            )
                    elif typ in frozenset(['C', 'D']):
                        tup = entry[arg]
                        assert isinstance(tup, tuple), (
                            "%s[%s][%d] type %s is %s should be an tuple but is %s. "
                            "Full entry: %s" %
                            (name, k, arg, typ, entry[arg], type(entry[arg]), entry)
                            )
                        assert len(tup) == 3
                        for j, x in enumerate(tup[:-1]):
                            assert isinstance(x, int), (
                                "%s[%s][%d][%d] type %s is %s should be an tuple but is %s. "
                                "Full entry: %s" %
                                (name, k, arg, j, typ, x, type(x), entry)
                                )
                        assert isinstance(tup[-1], str) or tup[-1] is None, (
                            "%s[%s][%d][%d] sep type %s is %s should be an string but is %s. "
                            "Full entry: %s" %
                            (name, k, arg, j, typ, tup[-1], type(x), entry)
                            )

                    elif typ == 'P':
                        tup = entry[arg]
                        assert isinstance(tup, tuple), (
                            "%s[%s][%d] type %s is %s should be an tuple but is %s. "
                            "Full entry: %s" %
                            (name, k, arg, typ, entry[arg], type(entry[arg]), entry)
                            )
                        assert len(tup) == 4
                        for j, x in enumerate(tup[:-2]):
                            assert isinstance(x, int), (
                                "%s[%s][%d][%d] type %s is '%s' should be an tuple but is %s. "
                                "Full entry: %s" %
                                (name, k, arg, j, typ, x, type(x), entry)
                                )
                        assert isinstance(tup[-2], str), (
                            "%s[%s][%d][%d] sep type %s is '%s' should be an string but is %s. "
                            "Full entry: %s" %
                            (name, k, arg, j, typ, x, type(x), entry)
                            )
                        assert isinstance(tup[1], int), (
                            "%s[%s][%d][%d] prec type %s is '%s' should be an int but is %s. "
                            "Full entry: %s" %
                            (name, k, arg, j, typ, x, type(x), entry)
                            )

                    else:
                        # Should be a tuple which contains only ints
                        tup = entry[arg]
                        assert isinstance(tup, tuple), (
                            "%s[%s][%d] type %s is '%s' should be an tuple but is %s. "
                            "Full entry: %s" %
                            (name, k, arg, typ, entry[arg], type(entry[arg]), entry)
                            )
                        assert len(tup) == 2
                        for j, x in enumerate(tup):
                            assert isinstance(x, int), (
                                "%s[%s][%d][%d] type '%s' is '%s should be an int but is %s. Full entry: %s" %
                                (name, k, arg, j, typ, x, type(x), entry)
                                )
                        pass
                    arg += 1
                elif typ in frozenset(['r']) and fragment:
                    pass
                elif typ == 'b' and fragment:
                    assert isinstance(entry[arg], int), (
                        "%s[%s][%d] type %s is '%s' should be an int but is %s. "
                        "Full entry: %s" %
                        (name, k, arg, typ, entry[arg], type(entry[arg]), entry)
                        )
                    arg += 1
                elif typ == 'x' and fragment:
                    tup = entry[arg]
                    assert isinstance(tup, tuple), (
                        "%s[%s][%d] type %s is '%s' should be an tuple but is %s. "
                        "Full entry: %s" %
                        (name, k, arg, typ, entry[arg], type(entry[arg]), entry)
                        )
                    assert len(tup) == 2
                    assert isinstance(tup[0], int), (
                        "%s[%s][%d] source type %s is '%s' should be an int but is %s. "
                        "Full entry: %s" %
                        (name, k, arg, typ, entry[arg], type(entry[arg]), entry)
                        )
                    assert isinstance(tup[1], tuple), (
                        "%s[%s][%d] dest type %s is '%s' should be an tuple but is %s. "
                        "Full entry: %s" %
                        (name, k, arg, typ, entry[arg], type(entry[arg]), entry)
                        )
                    for j, x in enumerate(tup[1]):
                        assert isinstance(x, int), (
                            "%s[%s][%d][%d] type %s is %s should be an int but is %s. Full entry: %s" %
                            (name, k, arg, j, typ, x, type(x), entry)
                            )
                    arg += 1
                    pass
                else:
                    assert False, (
                        "%s[%s][%d] type %s is not known. Full entry: %s" %
                        (name, k, arg, typ, entry)
                        )
                m = escape.search(fmt, i)
                pass
            assert arg == len(entry), (
                "%s[%s] arg %d should be length of entry %d. Full entry: %s" %
                (name, k, arg, len(entry), entry))
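What test_tables() validates is the shape of the semantic-action tables that pysource.py and fragments.py consume: each entry pairs a format string with the arguments its escape specifiers use, e.g. %c consumes a single child index (an int), %C and %D consume a (start, end, separator) tuple, and %p pairs a child index with a precedence. A rough sketch of how such an entry is read; this is illustrative only, not the real template_engine, and it handles only %c:

    # Hypothetical mini-interpreter for one TABLE_DIRECT-style entry.
    entry = ('%c %c %c', 0, -1, 1)        # the 'binary_expr' template shown later in consts.py
    def render(node, entry, render_child):
        fmt, args = entry[0], list(entry[1:])
        out = []
        for piece in fmt.split('%c'):     # the real code uses the 'escape' regexp instead
            out.append(piece)
            if args:
                out.append(render_child(node[args.pop(0)]))
        return ''.join(out)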
@@ -39,7 +39,7 @@ check-3.3: check-bytecode

#: Run working tests from Python 3.4
check-3.4: check-bytecode check-3.4-ok check-2.7-ok
	$(PYTHON) test_pythonlib.py --bytecode-3.4 --verify $(COMPILE)
	$(PYTHON) test_pythonlib.py --bytecode-3.4 --weak-verify $(COMPILE)

#: Run working tests from Python 3.5
check-3.5: check-bytecode
test/simple_source/branching/02_ifelse_lambda.py (new file, 5 lines)
@@ -0,0 +1,5 @@
# We have to do contortions here because
# lambda's have to be more or less on a line

f = lambda x: 1 if x<2 else 3
f(5)
@@ -169,13 +169,13 @@ def do_tests(src_dir, obj_patterns, target_dir, opts):
        main(src_dir, target_dir, files, [],
             do_verify=opts['do_verify'])
        if failed_files != 0:
            exit(2)
            sys.exit(2)
        elif failed_verify != 0:
            exit(3)
            sys.exit(3)

    except (KeyboardInterrupt, OSError):
        print()
        exit(1)
        sys.exit(1)
    if test_opts['rmtree']:
        parent_dir = os.path.dirname(target_dir)
        print("Everything good, removing %s" % parent_dir)
@@ -42,21 +42,25 @@ class PythonParser(GenericASTBuilder):
        else:
            return self.ast_first_offset(ast[0])

    def add_unique_rule(self, rule, opname, count, customize):
    def add_unique_rule(self, rule, opname, arg_count, customize):
        """Add rule to grammar, but only if it hasn't been added previously
           opname and count are used in the customize() semantic the actions
           to add the semantic action rule. Often, count is not used.
           opname and stack_count are used in the customize() semantic
           the actions to add the semantic action rule. Stack_count is
           used in custom opcodes like MAKE_FUNCTION to indicate how
           many arguments it has. Often it is not used.
        """
        if rule not in self.new_rules:
            # print("XXX ", rule) # debug
            self.new_rules.add(rule)
            self.addRule(rule, nop_func)
            customize[opname] = count
            customize[opname] = arg_count
            pass
        return

    def add_unique_rules(self, rules, customize):
        """Add rules (a list of string) to grammar
        """Add rules (a list of string) to grammar. Note that
           the rules must not be those that set arg_count in the
           custom dictionary.
        """
        for rule in rules:
            if len(rule) == 0:
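The arg_count recorded in the customize dictionary is what later tells the semantic pass how many arguments a dynamically generated rule for an opcode like CALL_FUNCTION_n or MAKE_FUNCTION_n carries. A hedged sketch of how such a rule gets registered; the rule text mirrors the CALL_FUNCTION handling shown further down in parse3.py, and the surrounding parser object (self) is assumed:

    # Sketch: register a grammar rule for a CALL_FUNCTION_2 token seen in the
    # instruction stream, remembering its argument count for the semantic pass.
    customize = {}
    rule = 'call_function ::= expr pos_arg pos_arg CALL_FUNCTION_2'
    self.add_unique_rule(rule, 'CALL_FUNCTION_2', 2, customize)
    # customize is now {'CALL_FUNCTION_2': 2}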
@@ -66,7 +70,9 @@ class PythonParser(GenericASTBuilder):
        return

    def add_unique_doc_rules(self, rules_str, customize):
        """Add rules (a docstring-like list of rules) to grammar
        """Add rules (a docstring-like list of rules) to grammar.
           Note that the rules must not be those that set arg_count in the
           custom dictionary.
        """
        rules = [r.strip() for r in rules_str.split("\n")]
        self.add_unique_rules(rules, customize)

@@ -83,17 +89,14 @@ class PythonParser(GenericASTBuilder):
        for i in dir(self):
            setattr(self, i, None)

    def debug_reduce(self, rule, tokens, parent, i):
    def debug_reduce(self, rule, tokens, parent, last_token_pos):
        """Customized format and print for our kind of tokens
        which gets called in debugging grammar reduce rules
        """
        def fix(c):
            s = str(c)
            i = s.find('_')
            if i == -1:
                return s
            else:
                return s[:i]
            last_token_pos = s.find('_')
            return s if last_token_pos == -1 else s[:last_token_pos]

        prefix = ''
        if parent and tokens:

@@ -105,13 +108,13 @@ class PythonParser(GenericASTBuilder):
            if hasattr(p_token, 'offset'):
                prefix += "%3s" % fix(p_token.offset)
                if len(rule[1]) > 1:
                    prefix += '-%-3s ' % fix(tokens[i-1].offset)
                    prefix += '-%-3s ' % fix(tokens[last_token_pos-1].offset)
                else:
                    prefix += ' '
        else:
            prefix = ' '

        print("%s%s ::= %s" % (prefix, rule[0], ' '.join(rule[1])))
        print("%s%s ::= %s (%d)" % (prefix, rule[0], ' '.join(rule[1]), last_token_pos))

    def error(self, instructions, index):
        # Find the last line boundary

@@ -132,7 +135,7 @@ class PythonParser(GenericASTBuilder):
        raise ParserError(err_token, err_token.offset)

    def typestring(self, token):
        return token.type
        return token.kind

    def nonterminal(self, nt, args):
        if nt in self.collect and len(args) > 1:

@@ -254,8 +257,11 @@ class PythonParser(GenericASTBuilder):

        stmt ::= return_stmt
        return_stmt ::= ret_expr RETURN_VALUE
        return_stmt_lambda ::= ret_expr RETURN_VALUE_LAMBDA

        return_stmts ::= return_stmt
        return_stmts ::= _stmts return_stmt

        """
        pass

@@ -530,7 +536,9 @@ class PythonParser(GenericASTBuilder):
        stmt ::= return_lambda
        stmt ::= conditional_lambda

        return_lambda ::= ret_expr RETURN_VALUE LAMBDA_MARKER
        return_lambda ::= ret_expr RETURN_VALUE_LAMBDA LAMBDA_MARKER
        return_lambda ::= ret_expr RETURN_VALUE_LAMBDA

        conditional_lambda ::= expr jmp_false return_if_stmt return_stmt LAMBDA_MARKER

        cmp ::= cmp_list

@@ -726,7 +734,7 @@ def get_python_parser(
    else:
        p = parse3.Python3ParserSingle(debug_parser)
    p.version = version
    # p.dumpGrammar() # debug
    # p.dump_grammar() # debug
    return p

class PythonParserSingle(PythonParser):
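The new return_stmt_lambda, return_lambda, and conditional_lambda productions are what let the deparser reassemble a conditional expression inside a lambda, which is exactly the shape exercised by the new test file above. As a reminder of the source-level construct these rules target (the same code as test/simple_source/branching/02_ifelse_lambda.py):

    # Source form that decompiles through the conditional_lambda rules:
    f = lambda x: 1 if x < 2 else 3
    assert f(1) == 1 and f(5) == 3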
@@ -29,8 +29,8 @@ class Python15ParserSingle(Python21Parser, PythonParserSingle):
|
||||
if __name__ == '__main__':
|
||||
# Check grammar
|
||||
p = Python15Parser()
|
||||
p.checkGrammar()
|
||||
p.dumpGrammar()
|
||||
p.check_grammar()
|
||||
p.dump_grammar()
|
||||
|
||||
# local variables:
|
||||
# tab-width: 4
|
||||
|
@@ -395,6 +395,8 @@ class Python2Parser(PythonParser):
|
||||
return
|
||||
|
||||
def reduce_is_invalid(self, rule, ast, tokens, first, last):
|
||||
if tokens is None:
|
||||
return False
|
||||
lhs = rule[0]
|
||||
if lhs in ('augassign1', 'augassign2') and ast[0][0] == 'and':
|
||||
return True
|
||||
@@ -415,4 +417,4 @@ class Python2ParserSingle(Python2Parser, PythonParserSingle):
|
||||
if __name__ == '__main__':
|
||||
# Check grammar
|
||||
p = Python2Parser()
|
||||
p.checkGrammar()
|
||||
p.check_grammar()
|
||||
|
@@ -33,8 +33,8 @@ class Python21ParserSingle(Python22Parser, PythonParserSingle):
|
||||
if __name__ == '__main__':
|
||||
# Check grammar
|
||||
p = Python21Parser()
|
||||
p.checkGrammar()
|
||||
p.dumpGrammar()
|
||||
p.check_grammar()
|
||||
p.dump_grammar()
|
||||
|
||||
# local variables:
|
||||
# tab-width: 4
|
||||
|
@@ -26,8 +26,8 @@ class Python22ParserSingle(Python23Parser, PythonParserSingle):
|
||||
if __name__ == '__main__':
|
||||
# Check grammar
|
||||
p = Python22Parser()
|
||||
p.checkGrammar()
|
||||
p.dumpGrammar()
|
||||
p.check_grammar()
|
||||
p.dump_grammar()
|
||||
|
||||
# local variables:
|
||||
# tab-width: 4
|
||||
|
@@ -67,8 +67,8 @@ class Python23ParserSingle(Python23Parser, PythonParserSingle):
|
||||
if __name__ == '__main__':
|
||||
# Check grammar
|
||||
p = Python23Parser()
|
||||
p.checkGrammar()
|
||||
p.dumpGrammar()
|
||||
p.check_grammar()
|
||||
p.dump_grammar()
|
||||
|
||||
# local variables:
|
||||
# tab-width: 4
|
||||
|
@@ -55,13 +55,14 @@ class Python24Parser(Python25Parser):
|
||||
invalid = super(Python24Parser,
|
||||
self).reduce_is_invalid(rule, ast,
|
||||
tokens, first, last)
|
||||
if invalid:
|
||||
if invalid or tokens is None:
|
||||
return invalid
|
||||
|
||||
# FiXME: this code never gets called...
|
||||
lhs = rule[0]
|
||||
if lhs == 'nop_stmt':
|
||||
return not int(tokens[first].pattr) == tokens[last].offset
|
||||
l = len(tokens)
|
||||
if 0 <= l < len(tokens):
|
||||
return not int(tokens[first].pattr) == tokens[last].offset
|
||||
|
||||
return False
|
||||
|
||||
@@ -71,4 +72,4 @@ class Python24ParserSingle(Python24Parser, PythonParserSingle):
|
||||
if __name__ == '__main__':
|
||||
# Check grammar
|
||||
p = Python24Parser()
|
||||
p.checkGrammar()
|
||||
p.check_grammar()
|
||||
|
@@ -60,4 +60,4 @@ class Python25ParserSingle(Python26Parser, PythonParserSingle):
|
||||
if __name__ == '__main__':
|
||||
# Check grammar
|
||||
p = Python25Parser()
|
||||
p.checkGrammar()
|
||||
p.check_grammar()
|
||||
|
@@ -247,7 +247,9 @@ class Python26Parser(Python2Parser):
|
||||
and ::= expr JUMP_IF_FALSE POP_TOP expr JUMP_IF_FALSE POP_TOP
|
||||
cmp_list ::= expr cmp_list1 ROT_TWO COME_FROM POP_TOP _come_from
|
||||
|
||||
conditional_lambda ::= expr jmp_false_then return_if_stmt return_stmt LAMBDA_MARKER
|
||||
return_if_lambda ::= RETURN_END_IF_LAMBDA POP_TOP
|
||||
conditional_lambda ::= expr jmp_false_then expr return_if_lambda
|
||||
return_stmt_lambda LAMBDA_MARKER
|
||||
"""
|
||||
|
||||
def add_custom_rules(self, tokens, customize):
|
||||
@@ -258,7 +260,7 @@ class Python26Parser(Python2Parser):
|
||||
invalid = super(Python26Parser,
|
||||
self).reduce_is_invalid(rule, ast,
|
||||
tokens, first, last)
|
||||
if invalid:
|
||||
if invalid or tokens is None:
|
||||
return invalid
|
||||
if rule == ('and', ('expr', 'jmp_false', 'expr', '\\e_come_from_opt')):
|
||||
# Test that jmp_false jumps to the end of "and"
|
||||
@@ -274,10 +276,10 @@ class Python26ParserSingle(Python2Parser, PythonParserSingle):
|
||||
if __name__ == '__main__':
|
||||
# Check grammar
|
||||
p = Python26Parser()
|
||||
p.checkGrammar()
|
||||
p.check_grammar()
|
||||
from uncompyle6 import PYTHON_VERSION, IS_PYPY
|
||||
if PYTHON_VERSION == 2.6:
|
||||
lhs, rhs, tokens, right_recursive = p.checkSets()
|
||||
lhs, rhs, tokens, right_recursive = p.check_sets()
|
||||
from uncompyle6.scanner import get_scanner
|
||||
s = get_scanner(PYTHON_VERSION, IS_PYPY)
|
||||
opcode_set = set(s.opc.opname).union(set(
|
||||
|
@@ -94,6 +94,10 @@ class Python27Parser(Python2Parser):
|
||||
WITH_CLEANUP END_FINALLY
|
||||
|
||||
# Common with 2.6
|
||||
return_if_lambda ::= RETURN_END_IF_LAMBDA COME_FROM
|
||||
conditional_lambda ::= expr jmp_false expr return_if_lambda
|
||||
return_stmt_lambda LAMBDA_MARKER
|
||||
|
||||
while1stmt ::= SETUP_LOOP return_stmts bp_come_from
|
||||
while1stmt ::= SETUP_LOOP return_stmts COME_FROM
|
||||
"""
|
||||
@@ -125,10 +129,10 @@ class Python27ParserSingle(Python27Parser, PythonParserSingle):
|
||||
if __name__ == '__main__':
|
||||
# Check grammar
|
||||
p = Python27Parser()
|
||||
p.checkGrammar()
|
||||
p.check_grammar()
|
||||
from uncompyle6 import PYTHON_VERSION, IS_PYPY
|
||||
if PYTHON_VERSION == 2.7:
|
||||
lhs, rhs, tokens, right_recursive = p.checkSets()
|
||||
lhs, rhs, tokens, right_recursive = p.check_sets()
|
||||
from uncompyle6.scanner import get_scanner
|
||||
s = get_scanner(PYTHON_VERSION, IS_PYPY)
|
||||
opcode_set = set(s.opc.opname).union(set(
|
||||
@@ -144,4 +148,5 @@ if __name__ == '__main__':
|
||||
for t in remain_tokens])
|
||||
remain_tokens = set(remain_tokens) - opcode_set
|
||||
print(remain_tokens)
|
||||
# p.dumpGrammar()
|
||||
p.check_grammar()
|
||||
p.dump_grammar()
|
||||
|
@@ -18,6 +18,7 @@ that a later phase can turn into a sequence of ASCII text.
|
||||
from uncompyle6.parser import PythonParser, PythonParserSingle, nop_func
|
||||
from uncompyle6.parsers.astnode import AST
|
||||
from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
|
||||
from xdis import PYTHON3
|
||||
|
||||
class Python3Parser(PythonParser):
|
||||
|
||||
@@ -415,6 +416,13 @@ class Python3Parser(PythonParser):
|
||||
# a JUMP_ABSOLUTE with no COME_FROM
|
||||
conditional ::= expr jmp_false expr jump_absolute_else expr
|
||||
|
||||
return_if_lambda ::= RETURN_END_IF_LAMBDA
|
||||
conditional_lambda ::= expr jmp_false return_stmt_lambda
|
||||
return_stmt_lambda LAMBDA_MARKER
|
||||
conditional_lambda ::= expr jmp_false expr return_if_lambda
|
||||
return_stmt_lambda LAMBDA_MARKER
|
||||
|
||||
|
||||
expr ::= LOAD_CLASSNAME
|
||||
|
||||
# Python 3.4+
|
||||
@@ -425,7 +433,7 @@ class Python3Parser(PythonParser):
|
||||
@staticmethod
|
||||
def call_fn_name(token):
|
||||
"""Customize CALL_FUNCTION to add the number of positional arguments"""
|
||||
return '%s_%i' % (token.type, token.attr)
|
||||
return '%s_%i' % (token.kind, token.attr)
|
||||
|
||||
def custom_build_class_rule(self, opname, i, token, tokens, customize):
|
||||
'''
|
||||
@@ -441,16 +449,16 @@ class Python3Parser(PythonParser):
|
||||
# FIXME: I bet this can be simplified
|
||||
# look for next MAKE_FUNCTION
|
||||
for i in range(i+1, len(tokens)):
|
||||
if tokens[i].type.startswith('MAKE_FUNCTION'):
|
||||
if tokens[i].kind.startswith('MAKE_FUNCTION'):
|
||||
break
|
||||
elif tokens[i].type.startswith('MAKE_CLOSURE'):
|
||||
elif tokens[i].kind.startswith('MAKE_CLOSURE'):
|
||||
break
|
||||
pass
|
||||
assert i < len(tokens), "build_class needs to find MAKE_FUNCTION or MAKE_CLOSURE"
|
||||
assert tokens[i+1].type == 'LOAD_CONST', \
|
||||
assert tokens[i+1].kind == 'LOAD_CONST', \
|
||||
"build_class expecting CONST after MAKE_FUNCTION/MAKE_CLOSURE"
|
||||
for i in range(i, len(tokens)):
|
||||
if tokens[i].type == 'CALL_FUNCTION':
|
||||
if tokens[i].kind == 'CALL_FUNCTION':
|
||||
call_fn_tok = tokens[i]
|
||||
break
|
||||
assert call_fn_tok, "build_class custom rule needs to find CALL_FUNCTION"
|
||||
@@ -491,7 +499,7 @@ class Python3Parser(PythonParser):
|
||||
# Yes, this computation based on instruction name is a little bit hoaky.
|
||||
nak = ( len(opname)-len('CALL_FUNCTION') ) // 3
|
||||
|
||||
token.type = self.call_fn_name(token)
|
||||
token.kind = self.call_fn_name(token)
|
||||
uniq_param = args_kw + args_pos
|
||||
if self.version == 3.5 and opname.startswith('CALL_FUNCTION_VAR'):
|
||||
# Python 3.5 changes the stack position of *args. KW args come
|
||||
@@ -503,33 +511,33 @@ class Python3Parser(PythonParser):
|
||||
kw = ''
|
||||
rule = ('call_function ::= expr expr ' +
|
||||
('pos_arg ' * args_pos) +
|
||||
('kwarg ' * args_kw) + kw + token.type)
|
||||
self.add_unique_rule(rule, token.type, uniq_param, customize)
|
||||
('kwarg ' * args_kw) + kw + token.kind)
|
||||
self.add_unique_rule(rule, token.kind, uniq_param, customize)
|
||||
if self.version >= 3.6 and opname == 'CALL_FUNCTION_EX_KW':
|
||||
rule = ('call_function36 ::= '
|
||||
'expr build_tuple_unpack_with_call build_map_unpack_with_call '
|
||||
'CALL_FUNCTION_EX_KW_1')
|
||||
self.add_unique_rule(rule, token.type, uniq_param, customize)
|
||||
self.add_unique_rule(rule, token.kind, uniq_param, customize)
|
||||
rule = 'call_function ::= call_function36'
|
||||
else:
|
||||
rule = ('call_function ::= expr ' +
|
||||
('pos_arg ' * args_pos) +
|
||||
('kwarg ' * args_kw) +
|
||||
'expr ' * nak + token.type)
|
||||
'expr ' * nak + token.kind)
|
||||
|
||||
self.add_unique_rule(rule, token.type, uniq_param, customize)
|
||||
self.add_unique_rule(rule, token.kind, uniq_param, customize)
|
||||
if self.version >= 3.5:
|
||||
rule = ('async_call_function ::= expr ' +
|
||||
('pos_arg ' * args_pos) +
|
||||
('kwarg ' * args_kw) +
|
||||
'expr ' * nak + token.type +
|
||||
'expr ' * nak + token.kind +
|
||||
' GET_AWAITABLE LOAD_CONST YIELD_FROM')
|
||||
self.add_unique_rule(rule, token.type, uniq_param, customize)
|
||||
self.add_unique_rule('expr ::= async_call_function', token.type, uniq_param, customize)
|
||||
self.add_unique_rule(rule, token.kind, uniq_param, customize)
|
||||
self.add_unique_rule('expr ::= async_call_function', token.kind, uniq_param, customize)
|
||||
|
||||
rule = ('classdefdeco2 ::= LOAD_BUILD_CLASS mkfunc %s%s_%d'
|
||||
% (('expr ' * (args_pos-1)), opname, args_pos))
|
||||
self.add_unique_rule(rule, token.type, uniq_param, customize)
|
||||
self.add_unique_rule(rule, token.kind, uniq_param, customize)
|
||||
|
||||
def add_make_function_rule(self, rule, opname, attr, customize):
|
||||
"""Python 3.3 added a an addtional LOAD_CONST before MAKE_FUNCTION and
|
||||
@@ -606,7 +614,7 @@ class Python3Parser(PythonParser):
|
||||
call_function ::= expr CALL_METHOD
|
||||
"""
|
||||
for i, token in enumerate(tokens):
|
||||
opname = token.type
|
||||
opname = token.kind
|
||||
opname_base = opname[:opname.rfind('_')]
|
||||
|
||||
if opname == 'PyPy':
|
||||
@@ -890,8 +898,11 @@ class Python3Parser(PythonParser):
|
||||
elif lhs == 'annotate_tuple':
|
||||
return not isinstance(tokens[first].attr, tuple)
|
||||
elif lhs == 'kwarg':
|
||||
return not (isinstance(tokens[first].attr, unicode) or
|
||||
isinstance(tokens[first].attr, str))
|
||||
arg = tokens[first].attr
|
||||
if PYTHON3:
|
||||
return not isinstance(arg, str)
|
||||
else:
|
||||
return not (isinstance(arg, str) or isinstance(arg, unicode))
|
||||
elif lhs == 'while1elsestmt':
|
||||
# if SETUP_LOOP target spans the else part, then this is
|
||||
# not while1else. Also do for whileTrue?
|
||||
@@ -900,7 +911,8 @@ class Python3Parser(PythonParser):
|
||||
last += 1
|
||||
return tokens[first].attr == tokens[last].offset
|
||||
elif lhs == 'while1stmt':
|
||||
if tokens[last] in ('COME_FROM_LOOP', 'JUMP_BACK'):
|
||||
if (0 <= last < len(tokens)
|
||||
and tokens[last] in ('COME_FROM_LOOP', 'JUMP_BACK')):
|
||||
# jump_back should be right afer SETUP_LOOP. Test?
|
||||
last += 1
|
||||
while last < len(tokens) and isinstance(tokens[last].offset, str):
|
||||
@@ -944,10 +956,10 @@ def info(args):
|
||||
p = Python32Parser()
|
||||
elif arg == '3.0':
|
||||
p = Python30Parser()
|
||||
p.checkGrammar()
|
||||
p.check_grammar()
|
||||
if len(sys.argv) > 1 and sys.argv[1] == 'dump':
|
||||
print('-' * 50)
|
||||
p.dumpGrammar()
|
||||
p.dump_grammar()
|
||||
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
|
@@ -42,7 +42,7 @@ class Python32Parser(Python3Parser):
|
||||
def add_custom_rules(self, tokens, customize):
|
||||
super(Python32Parser, self).add_custom_rules(tokens, customize)
|
||||
for i, token in enumerate(tokens):
|
||||
opname = token.type
|
||||
opname = token.kind
|
||||
if opname.startswith('MAKE_FUNCTION_A'):
|
||||
args_pos, args_kw, annotate_args = token.attr
|
||||
# Check that there are 2 annotated params?
|
||||
|
@@ -29,10 +29,10 @@ class Python34ParserSingle(Python34Parser, PythonParserSingle):
|
||||
if __name__ == '__main__':
|
||||
# Check grammar
|
||||
p = Python34Parser()
|
||||
p.checkGrammar()
|
||||
p.check_grammar()
|
||||
from uncompyle6 import PYTHON_VERSION, IS_PYPY
|
||||
if PYTHON_VERSION == 3.4:
|
||||
lhs, rhs, tokens, right_recursive = p.checkSets()
|
||||
lhs, rhs, tokens, right_recursive = p.check_sets()
|
||||
from uncompyle6.scanner import get_scanner
|
||||
s = get_scanner(PYTHON_VERSION, IS_PYPY)
|
||||
opcode_set = set(s.opc.opname).union(set(
|
||||
|
@@ -142,7 +142,7 @@ class Python35Parser(Python34Parser):
|
||||
def add_custom_rules(self, tokens, customize):
|
||||
super(Python35Parser, self).add_custom_rules(tokens, customize)
|
||||
for i, token in enumerate(tokens):
|
||||
opname = token.type
|
||||
opname = token.kind
|
||||
if opname == 'BUILD_MAP_UNPACK_WITH_CALL':
|
||||
nargs = token.attr % 256
|
||||
map_unpack_n = "map_unpack_%s" % nargs
|
||||
@@ -152,7 +152,7 @@ class Python35Parser(Python34Parser):
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
call_token = tokens[i+1]
|
||||
if self.version == 3.5:
|
||||
rule = 'call_function ::= expr unmapexpr ' + call_token.type
|
||||
rule = 'call_function ::= expr unmapexpr ' + call_token.kind
|
||||
self.add_unique_rule(rule, opname, token.attr, customize)
|
||||
pass
|
||||
pass
|
||||
@@ -164,10 +164,10 @@ class Python35ParserSingle(Python35Parser, PythonParserSingle):
|
||||
if __name__ == '__main__':
|
||||
# Check grammar
|
||||
p = Python35Parser()
|
||||
p.checkGrammar()
|
||||
p.check_grammar()
|
||||
from uncompyle6 import PYTHON_VERSION, IS_PYPY
|
||||
if PYTHON_VERSION == 3.5:
|
||||
lhs, rhs, tokens, right_recursive = p.checkSets()
|
||||
lhs, rhs, tokens, right_recursive = p.check_sets()
|
||||
from uncompyle6.scanner import get_scanner
|
||||
s = get_scanner(PYTHON_VERSION, IS_PYPY)
|
||||
opcode_set = set(s.opc.opname).union(set(
|
||||
|
@@ -36,7 +36,7 @@ class Python36Parser(Python35Parser):
|
||||
def add_custom_rules(self, tokens, customize):
|
||||
super(Python36Parser, self).add_custom_rules(tokens, customize)
|
||||
for i, token in enumerate(tokens):
|
||||
opname = token.type
|
||||
opname = token.kind
|
||||
|
||||
if opname == 'FORMAT_VALUE':
|
||||
rules_str = """
|
||||
@@ -64,10 +64,10 @@ class Python36Parser(Python35Parser):
|
||||
|
||||
if opname.startswith('CALL_FUNCTION_KW'):
|
||||
values = 'expr ' * token.attr
|
||||
rule = 'call_function ::= expr kwargs_only_36 {token.type}'.format(**locals())
|
||||
self.add_unique_rule(rule, token.type, token.attr, customize)
|
||||
rule = 'call_function ::= expr kwargs_only_36 {token.kind}'.format(**locals())
|
||||
self.add_unique_rule(rule, token.kind, token.attr, customize)
|
||||
rule = 'kwargs_only_36 ::= {values} LOAD_CONST'.format(**locals())
|
||||
self.add_unique_rule(rule, token.type, token.attr, customize)
|
||||
self.add_unique_rule(rule, token.kind, token.attr, customize)
|
||||
else:
|
||||
super(Python36Parser, self).custom_classfunc_rule(opname, token, customize)
|
||||
|
||||
@@ -78,10 +78,10 @@ class Python36ParserSingle(Python36Parser, PythonParserSingle):
|
||||
if __name__ == '__main__':
|
||||
# Check grammar
|
||||
p = Python36Parser()
|
||||
p.checkGrammar()
|
||||
p.check_grammar()
|
||||
from uncompyle6 import PYTHON_VERSION, IS_PYPY
|
||||
if PYTHON_VERSION == 3.6:
|
||||
lhs, rhs, tokens, right_recursive = p.checkSets()
|
||||
lhs, rhs, tokens, right_recursive = p.check_sets()
|
||||
from uncompyle6.scanner import get_scanner
|
||||
s = get_scanner(PYTHON_VERSION, IS_PYPY)
|
||||
opcode_set = set(s.opc.opname).union(set(
|
||||
|
@@ -21,10 +21,10 @@ class Python37ParserSingle(Python37Parser, PythonParserSingle):
|
||||
if __name__ == '__main__':
|
||||
# Check grammar
|
||||
p = Python37Parser()
|
||||
p.checkGrammar()
|
||||
p.check_grammar()
|
||||
from uncompyle6 import PYTHON_VERSION, IS_PYPY
|
||||
if PYTHON_VERSION == 3.7:
|
||||
lhs, rhs, tokens, right_recursive = p.checkSets()
|
||||
lhs, rhs, tokens, right_recursive = p.check_sets()
|
||||
from uncompyle6.scanner import get_scanner
|
||||
s = get_scanner(PYTHON_VERSION, IS_PYPY)
|
||||
opcode_set = set(s.opc.opname).union(set(
|
||||
|
@@ -20,7 +20,7 @@ from xdis.magics import py_str2float
# The byte code versions we support
PYTHON_VERSIONS = (1.5,
                   2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 2.7,
                   3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6)
                   3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6, 3.7)

# FIXME: DRY
if PYTHON3:

@@ -54,7 +54,7 @@ class Scanner(object):

        if version in PYTHON_VERSIONS:
            if is_pypy:
                v_str = "opcode_pypy%s" % (int(version * 10))
                v_str = "opcode_%spypy" % (int(version * 10))
            else:
                v_str = "opcode_%s" % (int(version * 10))
            exec("from xdis.opcodes import %s" % v_str)

@@ -63,6 +63,7 @@ class Scanner(object):
            raise TypeError("%s is not a Python version I know about" % version)

        self.opname = self.opc.opname

        # FIXME: This weird Python2 behavior is not Python3
        self.resetTokenClass()

@@ -99,7 +100,7 @@ class Scanner(object):
    def print_bytecode(self):
        for i in self.op_range(0, len(self.code)):
            op = self.code[i]
            if op in self.JUMP_OPs:
            if op in self.JUMP_OPS:
                dest = self.get_target(i, op)
                print('%i\t%s\t%i' % (i, self.opname[op], dest))
            else:
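The rename from "opcode_pypy%s" to "opcode_%spypy" matches the xdis module layout used elsewhere in this change set (for example, scanner_pypy27.py now imports opcode_27pypy): the version digits come first, then the pypy suffix. A small sketch of the module name the scanner ends up importing, under that assumption; the real code builds the name and imports it with exec as shown above:

    def opcode_module_name(version, is_pypy):
        # 2.7 on PyPy -> "opcode_27pypy"; 3.6 on CPython -> "opcode_36"
        v_str = "opcode_%s" % int(version * 10)
        if is_pypy:
            v_str += "pypy"
        return v_str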
@@ -1,4 +1,4 @@
|
||||
# Copyright (c) 2016 by Rocky Bernstein
|
||||
# Copyright (c) 2016-2017 by Rocky Bernstein
|
||||
"""
|
||||
Python PyPy 2.7 bytecode scanner/deparser
|
||||
|
||||
@@ -10,8 +10,8 @@ information for later use in deparsing.
|
||||
import uncompyle6.scanners.scanner27 as scan
|
||||
|
||||
# bytecode verification, verify(), uses JUMP_OPs from here
|
||||
from xdis.opcodes import opcode_pypy27
|
||||
JUMP_OPs = opcode_pypy27.JUMP_OPs
|
||||
from xdis.opcodes import opcode_27pypy
|
||||
JUMP_OPS = opcode_27pypy.JUMP_OPS
|
||||
|
||||
# We base this off of 2.6 instead of the other way around
|
||||
# because we cleaned things up this way.
|
||||
|
@@ -8,9 +8,9 @@ make things easier for decompilation.
|
||||
|
||||
import uncompyle6.scanners.scanner35 as scan
|
||||
|
||||
# bytecode verification, verify(), uses JUMP_OPs from here
|
||||
# bytecode verification, verify(), uses JUMP_OPS from here
|
||||
from xdis.opcodes import opcode_35 as opc # is this right?
|
||||
JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)
|
||||
JUMP_OPs = opc.JUMP_OPS
|
||||
|
||||
# We base this off of 3.5
|
||||
class ScannerPyPy35(scan.Scanner35):
|
||||
|
@@ -11,7 +11,7 @@ import uncompyle6.scanners.scanner21 as scan
|
||||
|
||||
# bytecode verification, verify(), uses JUMP_OPs from here
|
||||
from xdis.opcodes import opcode_15
|
||||
JUMP_OPs = opcode_15.JUMP_OPs
|
||||
JUMP_OPS = opcode_15.JUMP_OPS
|
||||
|
||||
# We base this off of 2.1 instead of the other way around
|
||||
# because we cleaned things up this way.
|
||||
|
@@ -99,12 +99,18 @@ class Scanner2(Scanner):
|
||||
for instr in bytecode.get_instructions(co):
|
||||
print(instr._disassemble())
|
||||
|
||||
# Container for tokens
|
||||
# list of tokens/instructions
|
||||
tokens = []
|
||||
|
||||
# "customize" is a dict whose keys are nonterminals
|
||||
# and the value is the argument stack entries for that
|
||||
# nonterminal. The count is a little hoaky. It is mostly
|
||||
# not used, but sometimes it is.
|
||||
# "customize" is a dict whose keys are nonterminals
|
||||
customize = {}
|
||||
|
||||
if self.is_pypy:
|
||||
customize['PyPy'] = 1
|
||||
customize['PyPy'] = 0
|
||||
|
||||
Token = self.Token # shortcut
|
||||
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright (c) 2016 by Rocky Bernstein
|
||||
# Copyright (c) 2016-2017 by Rocky Bernstein
|
||||
"""
|
||||
Python 2.1 bytecode scanner/deparser
|
||||
|
||||
@@ -11,7 +11,7 @@ import uncompyle6.scanners.scanner22 as scan
|
||||
|
||||
# bytecode verification, verify(), uses JUMP_OPs from here
|
||||
from xdis.opcodes import opcode_21
|
||||
JUMP_OPs = opcode_21.JUMP_OPs
|
||||
JUMP_OPS = opcode_21.JUMP_OPS
|
||||
|
||||
# We base this off of 2.2 instead of the other way around
|
||||
# because we cleaned things up this way.
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright (c) 2016 by Rocky Bernstein
|
||||
# Copyright (c) 2016-2017 by Rocky Bernstein
|
||||
"""
|
||||
Python 2.2 bytecode ingester.
|
||||
|
||||
@@ -11,7 +11,7 @@ import uncompyle6.scanners.scanner23 as scan
|
||||
|
||||
# bytecode verification, verify(), uses JUMP_OPs from here
|
||||
from xdis.opcodes import opcode_22
|
||||
JUMP_OPs = opcode_22.JUMP_OPs
|
||||
JUMP_OPS = opcode_22.JUMP_OPS
|
||||
|
||||
# We base this off of 2.3 instead of the other way around
|
||||
# because we cleaned things up this way.
|
||||
@@ -30,5 +30,5 @@ class Scanner22(scan.Scanner23):
|
||||
|
||||
def ingest22(self, co, classname=None, code_objects={}, show_asm=None):
|
||||
tokens, customize = self.parent_ingest(co, classname, code_objects, show_asm)
|
||||
tokens = [t for t in tokens if t.type != 'SET_LINENO']
|
||||
tokens = [t for t in tokens if t.kind != 'SET_LINENO']
|
||||
return tokens, customize
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright (c) 2016 by Rocky Bernstein
|
||||
# Copyright (c) 2016-2017 by Rocky Bernstein
|
||||
"""
|
||||
Python 2.3 bytecode scanner/deparser
|
||||
|
||||
@@ -10,7 +10,7 @@ import uncompyle6.scanners.scanner24 as scan
|
||||
|
||||
# bytecode verification, verify(), uses JUMP_OPs from here
|
||||
from xdis.opcodes import opcode_23
|
||||
JUMP_OPs = opcode_23.JUMP_OPs
|
||||
JUMP_OPS = opcode_23.JUMP_OPS
|
||||
|
||||
# We base this off of 2.4 instead of the other way around
|
||||
# because we cleaned things up this way.
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright (c) 2016 by Rocky Bernstein
|
||||
# Copyright (c) 2016-2017 by Rocky Bernstein
|
||||
"""
|
||||
Python 2.4 bytecode scanner/deparser
|
||||
|
||||
@@ -10,7 +10,7 @@ import uncompyle6.scanners.scanner25 as scan
|
||||
|
||||
# bytecode verification, verify(), uses JUMP_OPs from here
|
||||
from xdis.opcodes import opcode_24
|
||||
JUMP_OPs = opcode_24.JUMP_OPs
|
||||
JUMP_OPS = opcode_24.JUMP_OPS
|
||||
|
||||
# We base this off of 2.5 instead of the other way around
|
||||
# because we cleaned things up this way.
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright (c) 2015-2016 by Rocky Bernstein
|
||||
# Copyright (c) 2015-2017 by Rocky Bernstein
|
||||
"""
|
||||
Python 2.5 bytecode scanner/deparser
|
||||
|
||||
@@ -11,7 +11,7 @@ import uncompyle6.scanners.scanner26 as scan
|
||||
|
||||
# bytecode verification, verify(), uses JUMP_OPs from here
|
||||
from xdis.opcodes import opcode_25
|
||||
JUMP_OPs = opcode_25.JUMP_OPs
|
||||
JUMP_OPS = opcode_25.JUMP_OPS
|
||||
|
||||
# We base this off of 2.6 instead of the other way around
|
||||
# because we cleaned things up this way.
|
||||
|
@@ -19,7 +19,7 @@ from uncompyle6.scanner import L65536
|
||||
|
||||
# bytecode verification, verify(), uses JUMP_OPs from here
|
||||
from xdis.opcodes import opcode_26
|
||||
JUMP_OPs = opcode_26.JUMP_OPs
|
||||
JUMP_OPS = opcode_26.JUMP_OPS
|
||||
|
||||
class Scanner26(scan.Scanner2):
|
||||
def __init__(self, show_asm=False):
|
||||
@@ -217,8 +217,8 @@ class Scanner26(scan.Scanner2):
|
||||
# FIXME: this is a hack to catch stuff like:
|
||||
# if x: continue
|
||||
# the "continue" is not on a new line.
|
||||
if len(tokens) and tokens[-1].type == 'JUMP_BACK':
|
||||
tokens[-1].type = intern('CONTINUE')
|
||||
if len(tokens) and tokens[-1].kind == 'JUMP_BACK':
|
||||
tokens[-1].kind = intern('CONTINUE')
|
||||
|
||||
elif op in self.opc.JABS_OPS:
|
||||
pattr = repr(oparg)
|
||||
@@ -258,18 +258,18 @@ class Scanner26(scan.Scanner2):
|
||||
and self.code[offset+3] not in (self.opc.END_FINALLY,
|
||||
self.opc.POP_BLOCK)):
|
||||
if ((offset in self.linestartoffsets and
|
||||
tokens[-1].type == 'JUMP_BACK')
|
||||
tokens[-1].kind == 'JUMP_BACK')
|
||||
or offset not in self.not_continue):
|
||||
op_name = 'CONTINUE'
|
||||
else:
|
||||
# FIXME: this is a hack to catch stuff like:
|
||||
# if x: continue
|
||||
# the "continue" is not on a new line.
|
||||
if tokens[-1].type == 'JUMP_BACK':
|
||||
if tokens[-1].kind == 'JUMP_BACK':
|
||||
# We need 'intern' since we have
|
||||
# already have processed the previous
|
||||
# token.
|
||||
tokens[-1].type = intern('CONTINUE')
|
||||
tokens[-1].kind = intern('CONTINUE')
|
||||
|
||||
elif op == self.opc.LOAD_GLOBAL:
|
||||
if offset in self.load_asserts:
|
||||
|
@@ -16,7 +16,7 @@ if PYTHON3:
|
||||
|
||||
# bytecode verification, verify(), uses JUMP_OPs from here
|
||||
from xdis.opcodes import opcode_27
|
||||
JUMP_OPs = opcode_27.JUMP_OPs
|
||||
JUMP_OPS = opcode_27.JUMP_OPs
|
||||
|
||||
class Scanner27(Scanner2):
|
||||
def __init__(self, show_asm=False, is_pypy=False):
|
||||
@@ -92,9 +92,9 @@ class Scanner27(Scanner2):
|
||||
# the "continue" is not on a new line.
|
||||
n = len(tokens)
|
||||
if (n > 2 and
|
||||
tokens[-1].type == 'JUMP_BACK' and
|
||||
tokens[-1].kind == 'JUMP_BACK' and
|
||||
self.code[offset+3] == self.opc.END_FINALLY):
|
||||
tokens[-1].type = intern('CONTINUE')
|
||||
tokens[-1].kind = intern('CONTINUE')
|
||||
|
||||
pass
|
||||
|
||||
|
@@ -175,12 +175,16 @@ class Scanner3(Scanner):
|
||||
for instr in bytecode.get_instructions(co):
|
||||
print(instr._disassemble())
|
||||
|
||||
# Container for tokens
|
||||
# list of tokens/instructions
|
||||
tokens = []
|
||||
|
||||
# "customize" is a dict whose keys are nonterminals
|
||||
# and the value is the argument stack entries for that
|
||||
# nonterminal. The count is a little hoaky. It is mostly
|
||||
# not used, but sometimes it is.
|
||||
customize = {}
|
||||
if self.is_pypy:
|
||||
customize['PyPy'] = 1
|
||||
customize['PyPy'] = 0
|
||||
|
||||
self.code = array('B', co.co_code)
|
||||
self.build_lines_data(co)
|
||||
@@ -336,7 +340,7 @@ class Scanner3(Scanner):
|
||||
attr = (pos_args, name_pair_args, annotate_args)
|
||||
tokens.append(
|
||||
Token(
|
||||
type_ = opname,
|
||||
opname = opname,
|
||||
attr = attr,
|
||||
pattr = pattr,
|
||||
offset = inst.offset,
|
||||
@@ -396,12 +400,12 @@ class Scanner3(Scanner):
|
||||
# the "continue" is not on a new line.
|
||||
# There are other situations where we don't catch
|
||||
# CONTINUE as well.
|
||||
if tokens[-1].type == 'JUMP_BACK' and tokens[-1].attr <= argval:
|
||||
if tokens[-2].type == 'BREAK_LOOP':
|
||||
if tokens[-1].kind == 'JUMP_BACK' and tokens[-1].attr <= argval:
|
||||
if tokens[-2].kind == 'BREAK_LOOP':
|
||||
del tokens[-1]
|
||||
else:
|
||||
# intern is used because we are changing the *previous* token
|
||||
tokens[-1].type = intern('CONTINUE')
|
||||
tokens[-1].kind = intern('CONTINUE')
|
||||
if last_op_was_break and opname == 'CONTINUE':
|
||||
last_op_was_break = False
|
||||
continue
|
||||
@@ -414,7 +418,7 @@ class Scanner3(Scanner):
|
||||
last_op_was_break = opname == 'BREAK_LOOP'
|
||||
tokens.append(
|
||||
Token(
|
||||
type_ = opname,
|
||||
opname = opname,
|
||||
attr = argval,
|
||||
pattr = pattr,
|
||||
offset = inst.offset,
|
||||
@@ -949,7 +953,7 @@ class Scanner3(Scanner):
|
||||
return
|
||||
pass
|
||||
pass
|
||||
if code[pre_rtarget] == self.opc.RETURN_VALUE and self.version < 3.5:
|
||||
if code[pre_rtarget] == self.opc.RETURN_VALUE:
|
||||
self.return_end_ifs.add(pre_rtarget)
|
||||
else:
|
||||
self.fixed_jumps[offset] = rtarget
|
||||
@@ -969,7 +973,7 @@ class Scanner3(Scanner):
|
||||
if target > next_offset:
|
||||
next_op = code[next_offset]
|
||||
if (self.opc.JUMP_ABSOLUTE == next_op and
|
||||
END_FINALLY != code[xdis.next_offset(next_op, self.opc, next_offset)]):
|
||||
self.opc.END_FINALLY != code[xdis.next_offset(next_op, self.opc, next_offset)]):
|
||||
self.fixed_jumps[next_offset] = target
|
||||
self.except_targets[target] = next_offset
|
||||
|
||||
@@ -992,7 +996,8 @@ class Scanner3(Scanner):
|
||||
# misclassified as RETURN_END_IF. Handle that here.
|
||||
# In RETURN_VALUE, JUMP_ABSOLUTE, RETURN_VALUE is never RETURN_END_IF
|
||||
if op == self.opc.RETURN_VALUE:
|
||||
if (offset+1 < len(code) and code[offset+1] == self.opc.JUMP_ABSOLUTE and
|
||||
next_offset = xdis.next_offset(op, self.opc, offset)
|
||||
if (next_offset < len(code) and code[next_offset] == self.opc.JUMP_ABSOLUTE and
|
||||
offset in self.return_end_ifs):
|
||||
self.return_end_ifs.remove(offset)
|
||||
pass
|
||||
|
@@ -10,7 +10,7 @@ scanner routine for Python 3.
|
||||
from xdis.opcodes import opcode_30 as opc
|
||||
from xdis.bytecode import op_size
|
||||
|
||||
JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)
|
||||
JUMP_OPS = opc.JUMP_OPS
|
||||
|
||||
JUMP_TF = frozenset([opc.JUMP_IF_FALSE, opc.JUMP_IF_TRUE])
|
||||
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright (c) 2016 by Rocky Bernstein
|
||||
# Copyright (c) 2016-2017 by Rocky Bernstein
|
||||
"""
|
||||
Python 3.1 bytecode scanner/deparser
|
||||
|
||||
@@ -8,7 +8,7 @@ scanner routine for Python 3.
|
||||
|
||||
# bytecode verification, verify(), uses JUMP_OPs from here
|
||||
from xdis.opcodes import opcode_31 as opc
|
||||
JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)
|
||||
JUMP_OPS = opc.JUMP_OPS
|
||||
|
||||
from uncompyle6.scanners.scanner3 import Scanner3
|
||||
class Scanner31(Scanner3):
|
||||
|
@@ -11,7 +11,7 @@ scanner routine for Python 3.
|
||||
|
||||
# bytecode verification, verify(), uses JUMP_OPs from here
|
||||
from xdis.opcodes import opcode_32 as opc
|
||||
JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)
|
||||
JUMP_OPS = opc.JUMP_OPS
|
||||
|
||||
from uncompyle6.scanners.scanner3 import Scanner3
|
||||
class Scanner32(Scanner3):
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright (c) 2015-2016 by Rocky Bernstein
|
||||
# Copyright (c) 2015-2017 by Rocky Bernstein
|
||||
"""
|
||||
Python 3.3 bytecode scanner/deparser
|
||||
|
||||
@@ -8,7 +8,7 @@ scanner routine for Python 3.
|
||||
|
||||
# bytecode verification, verify(), uses JUMP_OPs from here
|
||||
from xdis.opcodes import opcode_33 as opc
|
||||
JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)
|
||||
JUMP_OPS = opc.JUMP_OPS
|
||||
|
||||
from uncompyle6.scanners.scanner3 import Scanner3
|
||||
class Scanner33(Scanner3):
|
||||
|
@@ -12,7 +12,7 @@ scanner routine for Python 3.
|
||||
from xdis.opcodes import opcode_34 as opc
|
||||
|
||||
# bytecode verification, verify(), uses JUMP_OPs from here
|
||||
JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)
|
||||
JUMP_OPS = opc.JUMP_OPS
|
||||
|
||||
|
||||
from uncompyle6.scanners.scanner3 import Scanner3
|
||||
|
@@ -13,7 +13,7 @@ from uncompyle6.scanners.scanner3 import Scanner3
|
||||
|
||||
# bytecode verification, verify(), uses JUMP_OPs from here
|
||||
from xdis.opcodes import opcode_35 as opc
|
||||
JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)
|
||||
JUMP_OPS = opc.JUMP_OPS
|
||||
|
||||
class Scanner35(Scanner3):
|
||||
|
||||
|
@@ -11,9 +11,9 @@ scanner routine for Python 3.
|
||||
|
||||
from uncompyle6.scanners.scanner3 import Scanner3
|
||||
|
||||
# bytecode verification, verify(), uses JUMP_OPs from here
|
||||
# bytecode verification, verify(), uses JUMP_OPS from here
|
||||
from xdis.opcodes import opcode_36 as opc
|
||||
JUMP_OPs = map(lambda op: opc.opname[op], opc.hasjrel + opc.hasjabs)
|
||||
JUMP_OPS = opc.JUMP_OPS
|
||||
|
||||
class Scanner36(Scanner3):
|
||||
|
||||
@@ -27,14 +27,14 @@ class Scanner36(Scanner3):
|
||||
# The lowest bit of flags indicates whether the
|
||||
# var-keyword argument is placed at the top of the stack
|
||||
if t.op == self.opc.CALL_FUNCTION_EX and t.attr & 1:
|
||||
t.type = 'CALL_FUNCTION_EX_KW'
|
||||
t.kind = 'CALL_FUNCTION_EX_KW'
|
||||
pass
|
||||
elif t.op == self.opc.CALL_FUNCTION_KW:
|
||||
t.type = 'CALL_FUNCTION_KW_{t.attr}'.format(**locals())
|
||||
t.kind = 'CALL_FUNCTION_KW_{t.attr}'.format(**locals())
|
||||
elif t.op == self.opc.BUILD_TUPLE_UNPACK_WITH_CALL:
|
||||
t.type = 'BUILD_TUPLE_UNPACK_WITH_CALL_%d' % t.attr
|
||||
t.kind = 'BUILD_TUPLE_UNPACK_WITH_CALL_%d' % t.attr
|
||||
elif t.op == self.opc.BUILD_MAP_UNPACK_WITH_CALL:
|
||||
t.type = 'BUILD_MAP_UNPACK_WITH_CALL_%d' % t.attr
|
||||
t.kind = 'BUILD_MAP_UNPACK_WITH_CALL_%d' % t.attr
|
||||
pass
|
||||
return tokens, customize
|
||||
|
||||
|
uncompyle6/scanners/scanner37.py (new file, 38 lines)
@@ -0,0 +1,38 @@
# Copyright (c) 2016-2017 by Rocky Bernstein
"""
Python 3.7 bytecode decompiler scanner

Does some additional massaging of xdis-disassembled instructions to
make things easier for decompilation.

This sets up opcodes Python's 3.6 and calls a generalized
scanner routine for Python 3.
"""

from __future__ import print_function

from uncompyle6.scanners.scanner3 import Scanner3

# bytecode verification, verify(), uses JUMP_OPs from here
from xdis.opcodes import opcode_36 as opc
JUMP_OPs = opc.JUMP_OPS

class Scanner37(Scanner3):

    def __init__(self, show_asm=None):
        Scanner3.__init__(self, 3.7, show_asm)
        return
    pass

if __name__ == "__main__":
    from uncompyle6 import PYTHON_VERSION
    if PYTHON_VERSION == 3.7:
        import inspect
        co = inspect.currentframe().f_code
        tokens, customize = Scanner37().ingest(co)
        for t in tokens:
            print(t.format())
        pass
    else:
        print("Need to be Python 3.7 to demo; I am %s." %
              PYTHON_VERSION)
@@ -1,4 +1,4 @@
# Copyright (c) 2016 by Rocky Bernstein
# Copyright (c) 2016-2017 by Rocky Bernstein
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
# Copyright (c) 1999 John Aycock

@@ -16,13 +16,12 @@ class Token:
    the contents of one line as output by dis.dis().
    """
    # FIXME: match Python 3.4's terms:
    #   type_ should be opname
    #   linestart = starts_line
    #   attr = argval
    #   pattr = argrepr
    def __init__(self, type_, attr=None, pattr=None, offset=-1,
    def __init__(self, opname, attr=None, pattr=None, offset=-1,
                 linestart=None, op=None, has_arg=None, opc=None):
        self.type = intern(type_)
        self.kind = intern(opname)
        self.op = op
        self.has_arg = has_arg
        self.attr = attr

@@ -37,20 +36,20 @@ class Token:
    def __eq__(self, o):
        """ '==', but it's okay if offsets and linestarts are different"""
        if isinstance(o, Token):
            # Both are tokens: compare type and attr
            # Both are tokens: compare kind and attr
            # It's okay if offsets are different
            return (self.type == o.type) and (self.pattr == o.pattr)
            return (self.kind == o.kind) and (self.pattr == o.pattr)
        else:
            return self.type == o
            return self.kind == o

    def __repr__(self):
        return str(self.type)
        return str(self.kind)

    # def __str__(self):
    #     pattr = self.pattr if self.pattr is not None else ''
    #     prefix = '\n%3d ' % self.linestart if self.linestart else (' ' * 6)
    #     return (prefix +
    #             ('%9s %-18s %r' % (self.offset, self.type, pattr)))
    #             ('%9s %-18s %r' % (self.offset, self.kind, pattr)))

    def __str__(self):
        return self.format(line_prefix='')

@@ -60,7 +59,7 @@ class Token:
            prefix = '\n%s%4d ' % (line_prefix, self.linestart)
        else:
            prefix = ' ' * (6 + len(line_prefix))
        offset_opname = '%6s %-17s' % (self.offset, self.type)
        offset_opname = '%6s %-17s' % (self.offset, self.kind)
        if not self.has_arg:
            return "%s%s" % (prefix, offset_opname)

@@ -84,14 +83,14 @@ class Token:
                pattr = self.opc.cmp_op[self.attr]
            # And so on. See xdis/bytecode.py get_instructions_bytes
            pass
        elif re.search('_\d+$', self.type):
        elif re.search('_\d+$', self.kind):
            return "%s%s%s" % (prefix, offset_opname, argstr)
        else:
            pattr = ''
        return "%s%s%s %r" % (prefix, offset_opname, argstr, pattr)

    def __hash__(self):
        return hash(self.type)
        return hash(self.kind)

    def __getitem__(self, i):
        raise IndexError
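The practical effect of the type to kind rename is visible anywhere a token is compared against a bare opcode name: __eq__ now compares kind (and pattr) rather than type. A short sketch of the intended usage, assuming the Token class from this file is in scope; the attr, pattr, and offset values are invented for illustration:

    t = Token('JUMP_BACK', attr=30, pattr='30', offset=44)
    assert t == 'JUMP_BACK'        # __eq__ falls back to comparing kind against a string
    assert t.kind == 'JUMP_BACK'   # the attribute formerly spelled t.type
    t.kind = 'CONTINUE'            # scanners retag tokens this way (via intern() in the real code)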
@@ -9,16 +9,16 @@ before reduction and don't reduce when there is a problem.
"""

def checker(ast, in_loop, errors):
    in_loop = in_loop or ast.type in ('while1stmt', 'whileTruestmt',
    in_loop = in_loop or ast.kind in ('while1stmt', 'whileTruestmt',
                                      'whilestmt', 'whileelsestmt', 'while1elsestmt',
                                      'for_block')
    if ast.type in ('augassign1', 'augassign2') and ast[0][0] == 'and':
    if ast.kind in ('augassign1', 'augassign2') and ast[0][0] == 'and':
        text = str(ast)
        error_text = '\n# improper augmented assigment (e.g. +=, *=, ...):\n#\t' + '\n# '.join(text.split("\n")) + '\n'
        errors.append(error_text)

    for node in ast:
        if not in_loop and node.type in ('continue_stmt', 'break_stmt'):
        if not in_loop and node.kind in ('continue_stmt', 'break_stmt'):
            text = str(node)
            error_text = '\n# not in loop:\n#\t' + '\n# '.join(text.split("\n"))
            errors.append(error_text)

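A sketch of how a driver might call checker() on a finished parse tree follows. The deparsed_ast name is a stand-in for whatever tree the caller holds; only the checker() signature comes from the hunk above.

# Hypothetical driver; deparsed_ast stands for the root of a parse tree.
errors = []
checker(deparsed_ast, False, errors)   # in_loop is False at the top level
if errors:
    # Each entry is already formatted as a '#'-commented block, so it can be
    # appended verbatim to the reconstructed source to flag the problem.
    print("".join(errors))
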
@@ -1,5 +1,5 @@
#  Copyright (c) 2017 by Rocky Bernstein
"""Constants used in pysource.py"""
"""Constants and initial table values used in pysource.py and fragments.py"""

import re, sys
from uncompyle6.parsers.astnode import AST
@@ -57,9 +57,7 @@ INDENT_PER_LEVEL = ' ' # additional intent per pretty-print level
|
||||
|
||||
TABLE_R = {
|
||||
'STORE_ATTR': ( '%c.%[1]{pattr}', 0),
|
||||
# 'STORE_SUBSCR': ( '%c[%c]', 0, 1 ),
|
||||
'DELETE_ATTR': ( '%|del %c.%[-1]{pattr}\n', 0 ),
|
||||
# 'EXEC_STMT': ( '%|exec %c in %[1]C\n', 0, (0,maxint,', ') ),
|
||||
}
|
||||
|
||||
TABLE_R0 = {
|
||||
@@ -67,8 +65,9 @@ TABLE_R0 = {
|
||||
# 'BUILD_TUPLE': ( '(%C)', (0,-1,', ') ),
|
||||
# 'CALL_FUNCTION': ( '%c(%P)', 0, (1,-1,', ') ),
|
||||
}
|
||||
|
||||
TABLE_DIRECT = {
|
||||
'BINARY_ADD': ( '+' ,),
|
||||
'BINARY_ADD': ( '+' ,),
|
||||
'BINARY_SUBTRACT': ( '-' ,),
|
||||
'BINARY_MULTIPLY': ( '*' ,),
|
||||
'BINARY_DIVIDE': ( '/' ,),
|
||||
@@ -76,13 +75,13 @@ TABLE_DIRECT = {
|
||||
'BINARY_TRUE_DIVIDE': ( '/' ,), # Not in <= 2.1
|
||||
'BINARY_FLOOR_DIVIDE': ( '//' ,),
|
||||
'BINARY_MODULO': ( '%%',),
|
||||
'BINARY_POWER': ( '**',),
|
||||
'BINARY_POWER': ( '**',),
|
||||
'BINARY_LSHIFT': ( '<<',),
|
||||
'BINARY_RSHIFT': ( '>>',),
|
||||
'BINARY_AND': ( '&' ,),
|
||||
'BINARY_OR': ( '|' ,),
|
||||
'BINARY_XOR': ( '^' ,),
|
||||
'INPLACE_ADD': ( '+=' ,),
|
||||
'BINARY_AND': ( '&' ,),
|
||||
'BINARY_OR': ( '|' ,),
|
||||
'BINARY_XOR': ( '^' ,),
|
||||
'INPLACE_ADD': ( '+=' ,),
|
||||
'INPLACE_SUBTRACT': ( '-=' ,),
|
||||
'INPLACE_MULTIPLY': ( '*=' ,),
|
||||
'INPLACE_MATRIX_MULTIPLY': ( '@=' ,),
|
||||
@@ -93,125 +92,126 @@ TABLE_DIRECT = {
|
||||
'INPLACE_POWER': ( '**=',),
|
||||
'INPLACE_LSHIFT': ( '<<=',),
|
||||
'INPLACE_RSHIFT': ( '>>=',),
|
||||
'INPLACE_AND': ( '&=' ,),
|
||||
'INPLACE_OR': ( '|=' ,),
|
||||
'INPLACE_XOR': ( '^=' ,),
|
||||
'binary_expr': ( '%c %c %c', 0, -1, 1 ),
|
||||
'INPLACE_AND': ( '&=' ,),
|
||||
'INPLACE_OR': ( '|=' ,),
|
||||
'INPLACE_XOR': ( '^=' ,),
|
||||
'binary_expr': ( '%c %c %c', 0, -1, 1 ),
|
||||
|
||||
'UNARY_POSITIVE': ( '+',),
|
||||
'UNARY_NEGATIVE': ( '-',),
|
||||
'UNARY_INVERT': ( '~%c'),
|
||||
'unary_expr': ( '%c%c', 1, 0),
|
||||
'UNARY_INVERT': ( '~'),
|
||||
'unary_expr': ( '%c%c', 1, 0),
|
||||
|
||||
'unary_not': ( 'not %c', 0 ),
|
||||
'unary_not': ( 'not %c', 0 ),
|
||||
'unary_convert': ( '`%c`', 0 ),
|
||||
'get_iter': ( 'iter(%c)', 0 ),
|
||||
'slice0': ( '%c[:]', 0 ),
|
||||
'slice1': ( '%c[%p:]', 0, (1, 100) ),
|
||||
'slice2': ( '%c[:%p]', 0, (1, 100) ),
|
||||
'slice3': ( '%c[%p:%p]', 0, (1, 100), (2, 100) ),
|
||||
'get_iter': ( 'iter(%c)', 0 ),
|
||||
'slice0': ( '%c[:]', 0 ),
|
||||
'slice1': ( '%c[%p:]', 0, (1, 100) ),
|
||||
'slice2': ( '%c[:%p]', 0, (1, 100) ),
|
||||
'slice3': ( '%c[%p:%p]', 0, (1, 100), (2, 100) ),
|
||||
|
||||
'IMPORT_FROM': ( '%{pattr}', ),
|
||||
'load_attr': ( '%c.%[1]{pattr}', 0),
|
||||
'LOAD_FAST': ( '%{pattr}', ),
|
||||
'LOAD_NAME': ( '%{pattr}', ),
|
||||
'IMPORT_FROM': ( '%{pattr}', ),
|
||||
'load_attr': ( '%c.%[1]{pattr}', 0),
|
||||
'LOAD_FAST': ( '%{pattr}', ),
|
||||
'LOAD_NAME': ( '%{pattr}', ),
|
||||
'LOAD_CLASSNAME': ( '%{pattr}', ),
|
||||
'LOAD_GLOBAL': ( '%{pattr}', ),
|
||||
'LOAD_DEREF': ( '%{pattr}', ),
|
||||
'LOAD_LOCALS': ( 'locals()', ),
|
||||
'LOAD_ASSERT': ( '%{pattr}', ),
|
||||
'LOAD_GLOBAL': ( '%{pattr}', ),
|
||||
'LOAD_DEREF': ( '%{pattr}', ),
|
||||
'LOAD_LOCALS': ( 'locals()', ),
|
||||
'LOAD_ASSERT': ( '%{pattr}', ),
|
||||
# 'LOAD_CONST': ( '%{pattr}', ), # handled by n_LOAD_CONST
|
||||
'DELETE_FAST': ( '%|del %{pattr}\n', ),
|
||||
'DELETE_NAME': ( '%|del %{pattr}\n', ),
|
||||
'DELETE_FAST': ( '%|del %{pattr}\n', ),
|
||||
'DELETE_NAME': ( '%|del %{pattr}\n', ),
|
||||
'DELETE_GLOBAL': ( '%|del %{pattr}\n', ),
|
||||
'delete_subscr': ( '%|del %c[%c]\n', 0, 1,),
|
||||
'binary_subscr': ( '%c[%p]', 0, (1, 100)),
|
||||
'binary_subscr2': ( '%c[%p]', 0, (1, 100)),
|
||||
'store_subscr': ( '%c[%c]', 0, 1),
|
||||
'STORE_FAST': ( '%{pattr}', ),
|
||||
'STORE_NAME': ( '%{pattr}', ),
|
||||
'STORE_GLOBAL': ( '%{pattr}', ),
|
||||
'STORE_DEREF': ( '%{pattr}', ),
|
||||
'unpack': ( '%C%,', (1, maxint, ', ') ),
|
||||
'store_subscr': ( '%c[%c]', 0, 1),
|
||||
'STORE_FAST': ( '%{pattr}', ),
|
||||
'STORE_NAME': ( '%{pattr}', ),
|
||||
'STORE_GLOBAL': ( '%{pattr}', ),
|
||||
'STORE_DEREF': ( '%{pattr}', ),
|
||||
'unpack': ( '%C%,', (1, maxint, ', ') ),
|
||||
|
||||
# This nonterminal we create on the fly in semantic routines
|
||||
'unpack_w_parens': ( '(%C%,)', (1, maxint, ', ') ),
|
||||
|
||||
'unpack_list': ( '[%C]', (1, maxint, ', ') ),
|
||||
'build_tuple2': ( '%P', (0, -1, ', ', 100) ),
|
||||
'unpack_list': ( '[%C]', (1, maxint, ', ') ),
|
||||
'build_tuple2': ( '%P', (0, -1, ', ', 100) ),
|
||||
|
||||
# 'list_compr': ( '[ %c ]', -2), # handled by n_list_compr
|
||||
'list_iter': ( '%c', 0),
|
||||
'list_for': ( ' for %c in %c%c', 2, 0, 3 ),
|
||||
'list_if': ( ' if %c%c', 0, 2 ),
|
||||
'list_iter': ( '%c', 0 ),
|
||||
'list_for': ( ' for %c in %c%c', 2, 0, 3 ),
|
||||
'list_if': ( ' if %c%c', 0, 2 ),
|
||||
'list_if_not': ( ' if not %p%c', (0, 22), 2 ),
|
||||
'lc_body': ( '', ), # ignore when recusing
|
||||
'lc_body': ( '', ), # ignore when recusing
|
||||
|
||||
'comp_iter': ( '%c', 0),
|
||||
'comp_if': ( ' if %c%c', 0, 2 ),
|
||||
'comp_ifnot': ( ' if not %p%c', (0, 22), 2 ),
|
||||
'comp_body': ( '', ), # ignore when recusing
|
||||
'comp_iter': ( '%c', 0 ),
|
||||
'comp_if': ( ' if %c%c', 0, 2 ),
|
||||
'comp_ifnot': ( ' if not %p%c', (0, 22), 2 ),
|
||||
'comp_body': ( '', ), # ignore when recusing
|
||||
'set_comp_body': ( '%c', 0 ),
|
||||
'gen_comp_body': ( '%c', 0 ),
|
||||
'dict_comp_body': ( '%c:%c', 1, 0 ),
|
||||
|
||||
'assign': ( '%|%c = %p\n', -1, (0, 200) ),
|
||||
'assign': ( '%|%c = %p\n', -1, (0, 200) ),
|
||||
|
||||
# The 2nd parameter should have a = suffix.
|
||||
# There is a rule with a 4th parameter "designator"
|
||||
# which we don't use here.
|
||||
'augassign1': ( '%|%c %c %c\n', 0, 2, 1),
|
||||
'augassign1': ( '%|%c %c %c\n', 0, 2, 1),
|
||||
|
||||
'augassign2': ( '%|%c.%[2]{pattr} %c %c\n', 0, -3, -4),
|
||||
'designList': ( '%c = %c', 0, -1 ),
|
||||
'augassign2': ( '%|%c.%[2]{pattr} %c %c\n', 0, -3, -4 ),
|
||||
'designList': ( '%c = %c', 0, -1 ),
|
||||
'and': ( '%c and %c', 0, 2 ),
|
||||
'ret_and': ( '%c and %c', 0, 2 ),
|
||||
'and2': ( '%c', 3 ),
|
||||
'or': ( '%c or %c', 0, 2 ),
|
||||
'ret_or': ( '%c or %c', 0, 2 ),
|
||||
'conditional': ( '%p if %p else %p', (2, 27), (0, 27), (4, 27)),
|
||||
'conditionalTrue': ( '%p if 1 else %p', (0, 27), (2, 27)),
|
||||
'ret_cond': ( '%p if %p else %p', (2, 27), (0, 27), (-1, 27)),
|
||||
'conditionalnot': ( '%p if not %p else %p', (2, 27), (0, 22), (4, 27)),
|
||||
'ret_cond_not': ( '%p if not %p else %p', (2, 27), (0, 22), (-1, 27)),
|
||||
'ret_or': ( '%c or %c', 0, 2 ),
|
||||
'conditional': ( '%p if %p else %p', (2, 27), (0, 27), (4, 27) ),
|
||||
'conditionalTrue': ( '%p if 1 else %p', (0, 27), (2, 27) ),
|
||||
'ret_cond': ( '%p if %p else %p', (2, 27), (0, 27), (-1, 27) ),
|
||||
'conditionalnot': ( '%p if not %p else %p', (2, 27), (0, 22), (4, 27) ),
|
||||
'ret_cond_not': ( '%p if not %p else %p', (2, 27), (0, 22), (-1, 27) ),
|
||||
'conditional_lambda': ( '(%c if %c else %c)', 2, 0, 3),
|
||||
'return_lambda': ('%c', 0),
|
||||
'compare': ( '%p %[-1]{pattr.replace("-", " ")} %p', (0, 19), (1, 19) ),
|
||||
'cmp_list': ( '%p %p', (0, 29), (1, 30)),
|
||||
'cmp_list1': ( '%[3]{pattr} %p %p', (0, 19), (-2, 19)),
|
||||
'cmp_list2': ( '%[1]{pattr} %p', (0, 19)),
|
||||
|
||||
'compare': ( '%p %[-1]{pattr.replace("-", " ")} %p', (0, 19), (1, 19) ),
|
||||
'cmp_list': ( '%p %p', (0, 29), (1, 30)),
|
||||
'cmp_list1': ( '%[3]{pattr} %p %p', (0, 19), (-2, 19)),
|
||||
'cmp_list2': ( '%[1]{pattr} %p', (0, 19)),
|
||||
# 'classdef': (), # handled by n_classdef()
|
||||
'funcdef': ( '\n\n%|def %c\n', -2), # -2 to handle closures
|
||||
'funcdef': ( '\n\n%|def %c\n', -2), # -2 to handle closures
|
||||
'funcdefdeco': ( '\n\n%c', 0),
|
||||
'mkfuncdeco': ( '%|@%c\n%c', 0, 1),
|
||||
'mkfuncdeco': ( '%|@%c\n%c', 0, 1),
|
||||
'mkfuncdeco0': ( '%|def %c\n', 0),
|
||||
'classdefdeco': ( '\n\n%c', 0),
|
||||
'classdefdeco1': ( '%|@%c\n%c', 0, 1),
|
||||
'kwarg': ( '%[0]{pattr}=%c', 1),
|
||||
'kwargs': ( '%D', (0, maxint, ', ') ),
|
||||
'kwarg': ( '%[0]{pattr}=%c', 1),
|
||||
'kwargs': ( '%D', (0, maxint, ', ') ),
|
||||
|
||||
'assert_expr_or': ( '%c or %c', 0, 2 ),
|
||||
'assert_expr_and': ( '%c and %c', 0, 2 ),
|
||||
'print_items_stmt': ( '%|print %c%c,\n', 0, 2), # Python 2 only
|
||||
'print_items_nl_stmt': ( '%|print %c%c\n', 0, 2),
|
||||
'print_item': ( ', %c', 0),
|
||||
'print_nl': ( '%|print\n', ),
|
||||
'print_to': ( '%|print >> %c, %c,\n', 0, 1 ),
|
||||
'print_to_nl': ( '%|print >> %c, %c\n', 0, 1 ),
|
||||
'print_nl_to': ( '%|print >> %c\n', 0 ),
|
||||
'assert_expr_or': ( '%c or %c', 0, 2 ),
|
||||
'assert_expr_and': ( '%c and %c', 0, 2 ),
|
||||
'print_items_stmt': ( '%|print %c%c,\n', 0, 2 ), # Python 2 only
|
||||
'print_items_nl_stmt': ( '%|print %c%c\n', 0, 2 ),
|
||||
'print_item': ( ', %c', 0),
|
||||
'print_nl': ( '%|print\n', ),
|
||||
'print_to': ( '%|print >> %c, %c,\n', 0, 1 ),
|
||||
'print_to_nl': ( '%|print >> %c, %c\n', 0, 1 ),
|
||||
'print_nl_to': ( '%|print >> %c\n', 0 ),
|
||||
'print_to_items': ( '%C', (0, 2, ', ') ),
|
||||
|
||||
'call_stmt': ( '%|%p\n', (0, 200)),
|
||||
'break_stmt': ( '%|break\n', ),
|
||||
'call_stmt': ( '%|%p\n', (0, 200)),
|
||||
'break_stmt': ( '%|break\n', ),
|
||||
'continue_stmt': ( '%|continue\n', ),
|
||||
|
||||
'raise_stmt0': ( '%|raise\n', ),
|
||||
'raise_stmt1': ( '%|raise %c\n', 0),
|
||||
'raise_stmt3': ( '%|raise %c, %c, %c\n', 0, 1, 2),
|
||||
'raise_stmt0': ( '%|raise\n', ),
|
||||
'raise_stmt1': ( '%|raise %c\n', 0),
|
||||
'raise_stmt3': ( '%|raise %c, %c, %c\n', 0, 1, 2),
|
||||
# 'yield': ( 'yield %c', 0),
|
||||
# 'return_stmt': ( '%|return %c\n', 0),
|
||||
'return_if_stmt': ( 'return %c\n', 0),
|
||||
|
||||
'ifstmt': ( '%|if %c:\n%+%c%-', 0, 1 ),
|
||||
'ifstmt': ( '%|if %c:\n%+%c%-', 0, 1 ),
|
||||
'iflaststmt': ( '%|if %c:\n%+%c%-', 0, 1 ),
|
||||
'iflaststmtl': ( '%|if %c:\n%+%c%-', 0, 1 ),
|
||||
'testtrue': ( 'not %p', (0, 22) ),
|
||||
@@ -229,37 +229,37 @@ TABLE_DIRECT = {
|
||||
'elifelsestmtr2': ( '%|elif %c:\n%+%c%-%|else:\n%+%c%-\n\n', 0, 1, 3 ), # has COME_FROM
|
||||
|
||||
'whileTruestmt': ( '%|while True:\n%+%c%-\n\n', 1 ),
|
||||
'whilestmt': ( '%|while %c:\n%+%c%-\n\n', 1, 2 ),
|
||||
'while1stmt': ( '%|while 1:\n%+%c%-\n\n', 1 ),
|
||||
'while1elsestmt': ( '%|while 1:\n%+%c%-%|else:\n%+%c%-\n\n', 1, -2 ),
|
||||
'whilestmt': ( '%|while %c:\n%+%c%-\n\n', 1, 2 ),
|
||||
'while1stmt': ( '%|while 1:\n%+%c%-\n\n', 1 ),
|
||||
'while1elsestmt': ( '%|while 1:\n%+%c%-%|else:\n%+%c%-\n\n', 1, -2 ),
|
||||
'whileelsestmt': ( '%|while %c:\n%+%c%-%|else:\n%+%c%-\n\n', 1, 2, -2 ),
|
||||
'whileelselaststmt': ( '%|while %c:\n%+%c%-%|else:\n%+%c%-', 1, 2, -2 ),
|
||||
'forstmt': ( '%|for %c in %c:\n%+%c%-\n\n', 3, 1, 4 ),
|
||||
'forelsestmt': (
|
||||
'%|for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n', 3, 1, 4, -2),
|
||||
'forstmt': ( '%|for %c in %c:\n%+%c%-\n\n', 3, 1, 4 ),
|
||||
'forelsestmt': (
|
||||
'%|for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n', 3, 1, 4, -2 ),
|
||||
'forelselaststmt': (
|
||||
'%|for %c in %c:\n%+%c%-%|else:\n%+%c%-', 3, 1, 4, -2),
|
||||
'%|for %c in %c:\n%+%c%-%|else:\n%+%c%-', 3, 1, 4, -2 ),
|
||||
'forelselaststmtl': (
|
||||
'%|for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n', 3, 1, 4, -2),
|
||||
'trystmt': ( '%|try:\n%+%c%-%c\n\n', 1, 3 ),
|
||||
'tryelsestmt': ( '%|try:\n%+%c%-%c%|else:\n%+%c%-\n\n', 1, 3, 4 ),
|
||||
'tryelsestmtc': ( '%|try:\n%+%c%-%c%|else:\n%+%c%-', 1, 3, 4 ),
|
||||
'tryelsestmtl': ( '%|try:\n%+%c%-%c%|else:\n%+%c%-', 1, 3, 4 ),
|
||||
'tf_trystmt': ( '%c%-%c%+', 1, 3 ),
|
||||
'%|for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n', 3, 1, 4, -2 ),
|
||||
'trystmt': ( '%|try:\n%+%c%-%c\n\n', 1, 3 ),
|
||||
'tryelsestmt': ( '%|try:\n%+%c%-%c%|else:\n%+%c%-\n\n', 1, 3, 4 ),
|
||||
'tryelsestmtc': ( '%|try:\n%+%c%-%c%|else:\n%+%c%-', 1, 3, 4 ),
|
||||
'tryelsestmtl': ( '%|try:\n%+%c%-%c%|else:\n%+%c%-', 1, 3, 4 ),
|
||||
'tf_trystmt': ( '%c%-%c%+', 1, 3 ),
|
||||
'tf_tryelsestmt': ( '%c%-%c%|else:\n%+%c', 1, 3, 4 ),
|
||||
'tryfinallystmt': ( '%|try:\n%+%c%-%|finally:\n%+%c%-\n\n', 1, 5 ),
|
||||
'except': ( '%|except:\n%+%c%-', 3 ),
|
||||
'except_cond1': ( '%|except %c:\n', 1 ),
|
||||
'except_cond1': ( '%|except %c:\n', 1 ),
|
||||
'except_suite': ( '%+%c%-%C', 0, (1, maxint, '') ),
|
||||
'except_suite_finalize': ( '%+%c%-%C', 1, (3, maxint, '') ),
|
||||
'passstmt': ( '%|pass\n', ),
|
||||
'STORE_FAST': ( '%{pattr}', ),
|
||||
'kv': ( '%c: %c', 3, 1 ),
|
||||
'kv2': ( '%c: %c', 1, 2 ),
|
||||
'mapexpr': ( '{%[1]C}', (0, maxint, ', ') ),
|
||||
'importstmt': ( '%|import %c\n', 2),
|
||||
'importfrom': ( '%|from %[2]{pattr} import %c\n', 3 ),
|
||||
'importstar': ( '%|from %[2]{pattr} import *\n', ),
|
||||
'passstmt': ( '%|pass\n', ),
|
||||
'STORE_FAST': ( '%{pattr}', ),
|
||||
'kv': ( '%c: %c', 3, 1 ),
|
||||
'kv2': ( '%c: %c', 1, 2 ),
|
||||
'mapexpr': ( '{%[1]C}', (0, maxint, ', ') ),
|
||||
'importstmt': ( '%|import %c\n', 2),
|
||||
'importfrom': ( '%|from %[2]{pattr} import %c\n', 3 ),
|
||||
'importstar': ( '%|from %[2]{pattr} import *\n', ),
|
||||
}
|
||||
|
||||
|
||||
@@ -276,7 +276,7 @@ MAP = {
|
||||
}
|
||||
|
||||
# Operator precidence
|
||||
# See https://docs.python.org/3/reference/expressions.html
|
||||
# See https://docs.python.org/2/reference/expressions.html
|
||||
# or https://docs.python.org/3/reference/expressions.html
|
||||
# for a list.
|
||||
PRECEDENCE = {
|
||||
@@ -332,6 +332,7 @@ PRECEDENCE = {
|
||||
'ret_or': 26,
|
||||
|
||||
'conditional': 28,
|
||||
'conditional_lamdba': 28,
|
||||
'conditionalnot': 28,
|
||||
'ret_cond': 28,
|
||||
'ret_cond_not': 28,
|
||||
|
@@ -1,6 +1,4 @@
#  Copyright (c) 2015, 2016 by Rocky Bernstein
#  Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
#  Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
#  Copyright (c) 2015-2017 by Rocky Bernstein
#  Copyright (c) 1999 John Aycock

"""
@@ -8,8 +6,8 @@ Creates Python source code from an uncompyle6 abstract syntax tree,
and indexes fragments which can be accessed by instruction offset
address.

See the comments in pysource for information on the abstract sytax tree
and how semantic actions are written.
See https://github.com/rocky/python-uncompyle6/wiki/Table-driven-semantic-actions.
for a more complete explanation, nicely marked up and with examples.

We add some format specifiers here not used in pysource

@@ -40,7 +38,8 @@ do it recursively which is where offsets are probably located.

2. %b
-----

%b associates the text from the previous start node up to what we have now
%b associates the text from the specified index to what we have now.
it takes an integer argument.

For example in:
  'importmultiple':   ( '%|import%b %c%c\n', 0, 2, 3 ),
@@ -96,9 +95,8 @@ TABLE_DIRECT_FRAGMENT = {
    'importfrom':       ( '%|from %[2]{pattr}%x import %c\n', (2, (0, 1)), 3),
    'importmultiple':   ( '%|import%b %c%c\n', 0, 2, 3 ),
    'list_for':         (' for %c%x in %c%c', 2, (2, (1, )), 0, 3 ),
    'forstmt':          ( '%|for%b %c%x in %c:\n%+%c%-\n\n', 0, 3, (3, (2, )), 1, 4 ),
    'forelsestmt':      (
    '%|for %c in %c%x:\n%+%c%-%|else:\n%+%c%-\n\n', 3, (3, (2,)), 1, 4, -2),
    '%|for %c%x in %c:\n%+%c%-%|else:\n%+%c%-\n\n', 3, (3, (2,)), 1, 4, -2),
    'forelselaststmt':  (
    '%|for %c%x in %c:\n%+%c%-%|else:\n%+%c%-', 3, (3, (2,)), 1, 4, -2),
    'forelselaststmtl': (
@@ -310,11 +308,11 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
def n_expr(self, node):
|
||||
start = len(self.f.getvalue())
|
||||
p = self.prec
|
||||
if node[0].type.startswith('binary_expr'):
|
||||
if node[0].kind.startswith('binary_expr'):
|
||||
n = node[0][-1][0]
|
||||
else:
|
||||
n = node[0]
|
||||
self.prec = PRECEDENCE.get(n.type, -2)
|
||||
self.prec = PRECEDENCE.get(n.kind, -2)
|
||||
if n == 'LOAD_CONST' and repr(n.pattr)[0] == '-':
|
||||
n.parent = node
|
||||
self.set_pos_info(n, start, len(self.f.getvalue()))
|
||||
@@ -407,7 +405,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
def n_ifelsestmtr(self, node):
|
||||
if node[2] == 'COME_FROM':
|
||||
return_stmts_node = node[3]
|
||||
node.type = 'ifelsestmtr2'
|
||||
node.kind = 'ifelsestmtr2'
|
||||
else:
|
||||
return_stmts_node = node[2]
|
||||
if len(return_stmts_node) != 2:
|
||||
@@ -424,10 +422,10 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
self.write(self.indent, 'if ')
|
||||
self.preorder(node[0])
|
||||
self.println(':')
|
||||
self.indentMore()
|
||||
self.indent_more()
|
||||
node[1].parent = node
|
||||
self.preorder(node[1])
|
||||
self.indentLess()
|
||||
self.indent_less()
|
||||
|
||||
if_ret_at_end = False
|
||||
if len(node[2][0]) >= 3:
|
||||
@@ -440,23 +438,23 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
for n in return_stmts_node[0]:
|
||||
if (n[0] == 'ifstmt' and n[0][1][0] == 'return_if_stmts'):
|
||||
if prev_stmt_is_if_ret:
|
||||
n[0].type = 'elifstmt'
|
||||
n[0].kind = 'elifstmt'
|
||||
prev_stmt_is_if_ret = True
|
||||
else:
|
||||
prev_stmt_is_if_ret = False
|
||||
if not past_else and not if_ret_at_end:
|
||||
self.println(self.indent, 'else:')
|
||||
self.indentMore()
|
||||
self.indent_more()
|
||||
past_else = True
|
||||
n.parent = node
|
||||
self.preorder(n)
|
||||
if not past_else or if_ret_at_end:
|
||||
self.println(self.indent, 'else:')
|
||||
self.indentMore()
|
||||
self.indent_more()
|
||||
node[2][1].parent = node
|
||||
self.preorder(node[2][1])
|
||||
self.set_pos_info(node, start, len(self.f.getvalue()))
|
||||
self.indentLess()
|
||||
self.indent_less()
|
||||
self.prune()
|
||||
|
||||
def n_elifelsestmtr(self, node):
|
||||
@@ -473,20 +471,20 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
node[0].parent = node
|
||||
self.preorder(node[0])
|
||||
self.println(':')
|
||||
self.indentMore()
|
||||
self.indent_more()
|
||||
node[1].parent = node
|
||||
self.preorder(node[1])
|
||||
self.indentLess()
|
||||
self.indent_less()
|
||||
|
||||
for n in node[2][0]:
|
||||
n[0].type = 'elifstmt'
|
||||
n[0].kind = 'elifstmt'
|
||||
n.parent = node
|
||||
self.preorder(n)
|
||||
self.println(self.indent, 'else:')
|
||||
self.indentMore()
|
||||
self.indent_more()
|
||||
node[2][1].parent = node
|
||||
self.preorder(node[2][1])
|
||||
self.indentLess()
|
||||
self.indent_less()
|
||||
self.set_pos_info(node, start, len(self.f.getvalue()))
|
||||
self.prune()
|
||||
|
||||
@@ -495,7 +493,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
iname = node[0].pattr
|
||||
|
||||
store_import_node = node[-1][-1]
|
||||
assert store_import_node.type.startswith('STORE_')
|
||||
assert store_import_node.kind.startswith('STORE_')
|
||||
|
||||
sname = store_import_node.pattr
|
||||
self.write(iname)
|
||||
@@ -530,7 +528,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
self.write(func_name)
|
||||
self.set_pos_info(code_node, start, len(self.f.getvalue()))
|
||||
|
||||
self.indentMore()
|
||||
self.indent_more()
|
||||
start = len(self.f.getvalue())
|
||||
self.make_function(node, isLambda=False, codeNode=code_node)
|
||||
|
||||
@@ -540,7 +538,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
self.write('\n\n')
|
||||
else:
|
||||
self.write('\n\n\n')
|
||||
self.indentLess()
|
||||
self.indent_less()
|
||||
self.prune() # stop recursing
|
||||
|
||||
def n_list_compr(self, node):
|
||||
@@ -556,7 +554,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
elif n == 'list_if': n = n[2]
|
||||
elif n == 'list_if_not': n= n[2]
|
||||
assert n == 'lc_body'
|
||||
if node[0].type.startswith('BUILD_LIST'):
|
||||
if node[0].kind.startswith('BUILD_LIST'):
|
||||
start = len(self.f.getvalue())
|
||||
self.set_pos_info(node[0], start, start+1)
|
||||
self.write( '[ ')
|
||||
@@ -689,7 +687,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
|
||||
# Python 2.7+ starts including set_comp_body
|
||||
# Python 3.5+ starts including setcomp_func
|
||||
assert n.type in ('lc_body', 'comp_body', 'setcomp_func', 'set_comp_body'), ast
|
||||
assert n.kind in ('lc_body', 'comp_body', 'setcomp_func', 'set_comp_body'), ast
|
||||
assert designator, "Couldn't find designator in list/set comprehension"
|
||||
|
||||
old_name = self.name
|
||||
@@ -716,7 +714,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
self.preorder(if_node)
|
||||
self.prec = p
|
||||
self.name = old_name
|
||||
if node[-1].type.startswith('CALL_FUNCTION'):
|
||||
if node[-1].kind.startswith('CALL_FUNCTION'):
|
||||
self.set_pos_info(node[-1], gen_start, len(self.f.getvalue()))
|
||||
|
||||
def listcomprehension_walk2(self, node):
|
||||
@@ -745,7 +743,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
n = n[3]
|
||||
elif n in ('list_if', 'list_if_not'):
|
||||
# FIXME: just a guess
|
||||
if n[0].type == 'expr':
|
||||
if n[0].kind == 'expr':
|
||||
list_if = n
|
||||
else:
|
||||
list_if = n[1]
|
||||
@@ -791,7 +789,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
start = len(self.f.getvalue())
|
||||
self.set_pos_info(node[0], start-1, start)
|
||||
self.comprehension_walk3(node, 1, 0)
|
||||
elif node[0].type == 'load_closure':
|
||||
elif node[0].kind == 'load_closure':
|
||||
self.setcomprehension_walk3(node, collection_index=4)
|
||||
else:
|
||||
self.comprehension_walk(node, iter_index=4)
|
||||
@@ -810,7 +808,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
self.set_pos_info(node[0], start, len(self.f.getvalue()))
|
||||
self.write(': {')
|
||||
start = len(self.f.getvalue())
|
||||
assert node[0].type.startswith('BUILD_SET')
|
||||
assert node[0].kind.startswith('BUILD_SET')
|
||||
self.set_pos_info(node[0], start-1, start)
|
||||
designator = node[3]
|
||||
assert designator == 'designator'
|
||||
@@ -819,7 +817,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
fin = len(self.f.getvalue())
|
||||
self.set_pos_info(designator, start, fin)
|
||||
for_iter_node = node[2]
|
||||
assert for_iter_node.type == 'FOR_ITER'
|
||||
assert for_iter_node.kind == 'FOR_ITER'
|
||||
self.set_pos_info(for_iter_node, start, fin)
|
||||
self.write(" for ")
|
||||
self.preorder(designator)
|
||||
@@ -838,7 +836,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
|
||||
def n_listcomp(self, node):
|
||||
self.write('[')
|
||||
if node[0].type == 'load_closure':
|
||||
if node[0].kind == 'load_closure':
|
||||
self.listcomprehension_walk2(node)
|
||||
else:
|
||||
if node[0] == 'LOAD_LISTCOMP':
|
||||
@@ -852,7 +850,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
if len(node) > 1:
|
||||
if (node[0] == 'c_stmts_opt' and
|
||||
node[0][0] == 'passstmt' and
|
||||
node[1].type.startswith('JUMP_FORWARD')):
|
||||
node[1].kind.startswith('JUMP_FORWARD')):
|
||||
self.set_pos_info(node[1], node[0][0].start, node[0][0].finish)
|
||||
|
||||
def setcomprehension_walk3(self, node, collection_index):
|
||||
@@ -883,7 +881,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
n = n[3]
|
||||
elif n in ('list_if', 'list_if_not', 'comp_if', 'comp_if_not'):
|
||||
# FIXME: just a guess
|
||||
if n[0].type == 'expr':
|
||||
if n[0].kind == 'expr':
|
||||
list_if = n
|
||||
else:
|
||||
list_if = n[1]
|
||||
@@ -989,9 +987,9 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
self.println(':')
|
||||
|
||||
# class body
|
||||
self.indentMore()
|
||||
self.indent_more()
|
||||
self.build_class(subclass)
|
||||
self.indentLess()
|
||||
self.indent_less()
|
||||
|
||||
self.currentclass = cclass
|
||||
self.set_pos_info(node, start, len(self.f.getvalue()))
|
||||
@@ -1046,8 +1044,8 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
# NOTE: this differs from behavior in pysource.py
|
||||
|
||||
if len(tokens) >= 2 and not noneInNames:
|
||||
if tokens[-1].type == 'RETURN_VALUE':
|
||||
if tokens[-2].type != 'LOAD_CONST':
|
||||
if tokens[-1].kind == 'RETURN_VALUE':
|
||||
if tokens[-2].kind != 'LOAD_CONST':
|
||||
tokens.append(Token('RETURN_LAST'))
|
||||
if len(tokens) == 0:
|
||||
return
|
||||
@@ -1301,10 +1299,10 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
# as a custom rule
|
||||
start = len(self.f.getvalue())
|
||||
n = len(node)-1
|
||||
assert node[n].type.startswith('CALL_FUNCTION')
|
||||
assert node[n].kind.startswith('CALL_FUNCTION')
|
||||
|
||||
for i in range(n-2, 0, -1):
|
||||
if not node[i].type in ['expr', 'LOAD_CLASSNAME']:
|
||||
if not node[i].kind in ['expr', 'LOAD_CLASSNAME']:
|
||||
break
|
||||
pass
|
||||
|
||||
@@ -1332,14 +1330,14 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
p = self.prec
|
||||
self.prec = 100
|
||||
|
||||
self.indentMore(INDENT_PER_LEVEL)
|
||||
self.indent_more(INDENT_PER_LEVEL)
|
||||
line_seperator = ',\n' + self.indent
|
||||
sep = INDENT_PER_LEVEL[:-1]
|
||||
start = len(self.f.getvalue())
|
||||
self.write('{')
|
||||
|
||||
if self.version > 3.0:
|
||||
if node[0].type.startswith('kvlist'):
|
||||
if node[0].kind.startswith('kvlist'):
|
||||
# Python 3.5+ style key/value list in mapexpr
|
||||
kv_node = node[0]
|
||||
l = list(kv_node)
|
||||
@@ -1354,11 +1352,11 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
i += 2
|
||||
pass
|
||||
pass
|
||||
elif node[1].type.startswith('kvlist'):
|
||||
elif node[1].kind.startswith('kvlist'):
|
||||
# Python 3.0..3.4 style key/value list in mapexpr
|
||||
kv_node = node[1]
|
||||
l = list(kv_node)
|
||||
if len(l) > 0 and l[0].type == 'kv3':
|
||||
if len(l) > 0 and l[0].kind == 'kv3':
|
||||
# Python 3.2 does this
|
||||
kv_node = node[1][0]
|
||||
l = list(kv_node)
|
||||
@@ -1381,7 +1379,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
pass
|
||||
else:
|
||||
# Python 2 style kvlist
|
||||
assert node[-1].type.startswith('kvlist')
|
||||
assert node[-1].kind.startswith('kvlist')
|
||||
kv_node = node[-1] # goto kvlist
|
||||
|
||||
for kv in kv_node:
|
||||
@@ -1409,7 +1407,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
n.parent = node
|
||||
self.set_pos_info(n, start, finish)
|
||||
self.set_pos_info(node, start, finish)
|
||||
self.indentLess(INDENT_PER_LEVEL)
|
||||
self.indent_less(INDENT_PER_LEVEL)
|
||||
self.prec = p
|
||||
self.prune()
|
||||
|
||||
@@ -1420,7 +1418,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
p = self.prec
|
||||
self.prec = 100
|
||||
n = node.pop()
|
||||
lastnode = n.type
|
||||
lastnode = n.kind
|
||||
start = len(self.f.getvalue())
|
||||
if lastnode.startswith('BUILD_LIST'):
|
||||
self.write('['); endchar = ']'
|
||||
@@ -1445,7 +1443,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
else:
|
||||
flat_elems.append(elem)
|
||||
|
||||
self.indentMore(INDENT_PER_LEVEL)
|
||||
self.indent_more(INDENT_PER_LEVEL)
|
||||
if len(node) > 3:
|
||||
line_separator = ',\n' + self.indent
|
||||
else:
|
||||
@@ -1470,14 +1468,14 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
n.parent = node.parent
|
||||
self.set_pos_info(n, start, finish)
|
||||
self.set_pos_info(node, start, finish)
|
||||
self.indentLess(INDENT_PER_LEVEL)
|
||||
self.indent_less(INDENT_PER_LEVEL)
|
||||
self.prec = p
|
||||
self.prune()
|
||||
|
||||
def engine(self, entry, startnode):
|
||||
def template_engine(self, entry, startnode):
|
||||
"""The format template interpetation engine. See the comment at the
|
||||
beginning of this module for the how we interpret format specifications such as
|
||||
%c, %C, and so on.
|
||||
beginning of this module for the how we interpret format
|
||||
specifications such as %c, %C, and so on.
|
||||
"""
|
||||
|
||||
# print("-----")
|
||||
@@ -1514,8 +1512,8 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
self.write('%')
|
||||
self.set_pos_info(node, start, len(self.f.getvalue()))
|
||||
|
||||
elif typ == '+': self.indentMore()
|
||||
elif typ == '-': self.indentLess()
|
||||
elif typ == '+': self.indent_more()
|
||||
elif typ == '-': self.indent_less()
|
||||
elif typ == '|': self.write(self.indent)
|
||||
# no longer used, since BUILD_TUPLE_n is pretty printed:
|
||||
elif typ == 'r': recurse_node = True
|
||||
@@ -1535,7 +1533,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
# for loops have two positions that correspond to a single text
|
||||
# location. In "for i in ..." there is the initialization "i" code as well
|
||||
# as the iteration code with "i"
|
||||
match = re.search(r'^for', startnode.type)
|
||||
match = re.search(r'^for', startnode.kind)
|
||||
if match and entry[arg] == 3:
|
||||
self.set_pos_info(node[0], start, finish)
|
||||
for n in node[2]:
|
||||
@@ -1629,7 +1627,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
|
||||
# 2. subroutine calls. It the last op is the call and for purposes of printing
|
||||
# we don't need to print anything special there. However it encompases the
|
||||
# entire string of the node fn(...)
|
||||
match = re.search(r'^call_function', startnode.type)
|
||||
match = re.search(r'^call_function', startnode.kind)
|
||||
if match:
|
||||
last_node = startnode[-1]
|
||||
# import traceback; traceback.print_stack()
|
||||
@@ -1770,7 +1768,7 @@ if __name__ == '__main__':
|
||||
nodeInfo = walk.offsets[name, offset]
|
||||
node = nodeInfo.node
|
||||
extractInfo = walk.extract_node_info(node)
|
||||
print("code: %s" % node.type)
|
||||
print("code: %s" % node.kind)
|
||||
# print extractInfo
|
||||
print(extractInfo.selectedText)
|
||||
print(extractInfo.selectedLine)
|
||||
@@ -1780,7 +1778,7 @@ if __name__ == '__main__':
|
||||
print("Contained in...")
|
||||
print(extractInfo.selectedLine)
|
||||
print(extractInfo.markerLine)
|
||||
print("code: %s" % p.type)
|
||||
print("code: %s" % p.kind)
|
||||
print('=' * 40)
|
||||
pass
|
||||
pass
|
||||
@@ -1799,7 +1797,7 @@ if __name__ == '__main__':
|
||||
nodeInfo = walk.offsets[name, offset]
|
||||
node = nodeInfo.node
|
||||
extractInfo = walk.extract_node_info(node)
|
||||
print("code: %s" % node.type)
|
||||
print("code: %s" % node.kind)
|
||||
# print extractInfo
|
||||
print(extractInfo.selectedText)
|
||||
print(extractInfo.selectedLine)
|
||||
@@ -1809,7 +1807,7 @@ if __name__ == '__main__':
|
||||
print("Contained in...")
|
||||
print(extractInfo.selectedLine)
|
||||
print(extractInfo.markerLine)
|
||||
print("code: %s" % p.type)
|
||||
print("code: %s" % p.kind)
|
||||
print('=' * 40)
|
||||
pass
|
||||
pass
|
||||
|
@@ -17,7 +17,7 @@ def find_all_globals(node, globs):
|
||||
for n in node:
|
||||
if isinstance(n, AST):
|
||||
globs = find_all_globals(n, globs)
|
||||
elif n.type in ('STORE_GLOBAL', 'DELETE_GLOBAL', 'LOAD_GLOBAL'):
|
||||
elif n.kind in ('STORE_GLOBAL', 'DELETE_GLOBAL', 'LOAD_GLOBAL'):
|
||||
globs.add(n.pattr)
|
||||
return globs
|
||||
|
||||
@@ -26,7 +26,7 @@ def find_globals(node, globs):
|
||||
for n in node:
|
||||
if isinstance(n, AST):
|
||||
globs = find_globals(n, globs)
|
||||
elif n.type in ('STORE_GLOBAL', 'DELETE_GLOBAL'):
|
||||
elif n.kind in ('STORE_GLOBAL', 'DELETE_GLOBAL'):
|
||||
globs.add(n.pattr)
|
||||
return globs
|
||||
|
||||
@@ -36,7 +36,7 @@ def find_none(node):
|
||||
if n not in ('return_stmt', 'return_if_stmt'):
|
||||
if find_none(n):
|
||||
return True
|
||||
elif n.type == 'LOAD_CONST' and n.pattr is None:
|
||||
elif n.kind == 'LOAD_CONST' and n.pattr is None:
|
||||
return True
|
||||
return False
|
||||
|
||||
@@ -64,7 +64,7 @@ def make_function3_annotate(self, node, isLambda, nested=1,
|
||||
return name
|
||||
|
||||
# MAKE_FUNCTION_... or MAKE_CLOSURE_...
|
||||
assert node[-1].type.startswith('MAKE_')
|
||||
assert node[-1].kind.startswith('MAKE_')
|
||||
|
||||
annotate_tuple = None
|
||||
for annotate_last in range(len(node)-1, -1, -1):
|
||||
@@ -80,7 +80,7 @@ def make_function3_annotate(self, node, isLambda, nested=1,
|
||||
i = -1
|
||||
j = annotate_last-1
|
||||
l = -len(node)
|
||||
while j >= l and node[j].type in ('annotate_arg' 'annotate_tuple'):
|
||||
while j >= l and node[j].kind in ('annotate_arg' 'annotate_tuple'):
|
||||
annotate_args[annotate_tup[i]] = node[j][0]
|
||||
i -= 1
|
||||
j -= 1
|
||||
@@ -106,7 +106,7 @@ def make_function3_annotate(self, node, isLambda, nested=1,
|
||||
lambda_index = None
|
||||
|
||||
if lambda_index and isLambda and iscode(node[lambda_index].attr):
|
||||
assert node[lambda_index].type == 'LOAD_LAMBDA'
|
||||
assert node[lambda_index].kind == 'LOAD_LAMBDA'
|
||||
code = node[lambda_index].attr
|
||||
else:
|
||||
code = codeNode.attr
|
||||
@@ -318,7 +318,7 @@ def make_function2(self, node, isLambda, nested=1, codeNode=None):
|
||||
return name
|
||||
|
||||
# MAKE_FUNCTION_... or MAKE_CLOSURE_...
|
||||
assert node[-1].type.startswith('MAKE_')
|
||||
assert node[-1].kind.startswith('MAKE_')
|
||||
|
||||
args_node = node[-1]
|
||||
if isinstance(args_node.attr, tuple):
|
||||
@@ -334,7 +334,7 @@ def make_function2(self, node, isLambda, nested=1, codeNode=None):
|
||||
lambda_index = None
|
||||
|
||||
if lambda_index and isLambda and iscode(node[lambda_index].attr):
|
||||
assert node[lambda_index].type == 'LOAD_LAMBDA'
|
||||
assert node[lambda_index].kind == 'LOAD_LAMBDA'
|
||||
code = node[lambda_index].attr
|
||||
else:
|
||||
code = codeNode.attr
|
||||
@@ -450,7 +450,7 @@ def make_function3(self, node, isLambda, nested=1, codeNode=None):
|
||||
return name
|
||||
|
||||
# MAKE_FUNCTION_... or MAKE_CLOSURE_...
|
||||
assert node[-1].type.startswith('MAKE_')
|
||||
assert node[-1].kind.startswith('MAKE_')
|
||||
|
||||
args_node = node[-1]
|
||||
if isinstance(args_node.attr, tuple):
|
||||
@@ -484,7 +484,7 @@ def make_function3(self, node, isLambda, nested=1, codeNode=None):
|
||||
lambda_index = None
|
||||
|
||||
if lambda_index and isLambda and iscode(node[lambda_index].attr):
|
||||
assert node[lambda_index].type == 'LOAD_LAMBDA'
|
||||
assert node[lambda_index].kind == 'LOAD_LAMBDA'
|
||||
code = node[lambda_index].attr
|
||||
else:
|
||||
code = codeNode.attr
|
||||
@@ -585,7 +585,7 @@ def make_function3(self, node, isLambda, nested=1, codeNode=None):
|
||||
for n in node:
|
||||
if n == 'pos_arg':
|
||||
continue
|
||||
elif self.version >= 3.4 and not (n.type in ('kwargs', 'kwarg')):
|
||||
elif self.version >= 3.4 and not (n.kind in ('kwargs', 'kwarg')):
|
||||
continue
|
||||
else:
|
||||
self.preorder(n)
|
||||
|
@@ -11,62 +11,102 @@ and what they mean).

Upper levels of the grammar is a more-or-less conventional grammar for
Python.

Semantic action rules for nonterminal symbols can be specified here by
creating a method prefaced with "n_" for that nonterminal.  For
example, "n_exec_stmt" handles the semantic actions for the
"exec_smnt" nonterminal symbol. Similarly if a method with the name
of the nonterminal is suffixed with "_exit" it will be called after
all of its children are called.

Another other way to specify a semantic rule for a nonterminal is via
rule given in one of the tables MAP_R0, MAP_R, or MAP_DIRECT.

These uses a printf-like syntax to direct substitution from attributes
of the nonterminal and its children..

The rest of the below describes how table-driven semantic actions work
and gives a list of the format specifiers. The default() and engine()
methods implement most of the below.

  Step 1 determines a table (T) and a path to a
  table key (K) from the node type (N) (other nodes are shown as O):

         N                  N               N&K
     / | ... \          / | ... \        / | ... \
    O  O      O        O  O      K      O  O      O
              |
              K

   MAP_R0 (TABLE_R0)  MAP_R (TABLE_R)  MAP_DIRECT (TABLE_DIRECT)

  The default is a direct mapping.  The key K is then extracted from the
  subtree and used to find a table entry T[K], if any.  The result is a
  format string and arguments (a la printf()) for the formatting engine.
  Escapes in the format string are:

    %c  evaluate children N[A] recursively*
    %C  evaluate children N[A[0]]..N[A[1]-1] recursively, separate by A[2]*
    %P  same as %C but sets operator precedence
    %D  same as %C but is for left-recursive lists like kwargs which
        goes to epsilon at the beginning. Using %C an extra separator
        with an epsilon appears at the beginning
    %,  print ',' if last %C only printed one item. This is mostly for tuples
        on the LHS of an assignment statement since BUILD_TUPLE_n pretty-prints
        other tuples.
    %|  tab to current indentation level
    %+  increase current indentation level
    %-  decrease current indentation level
    %{...} evaluate ... in context of N
    %%  literal '%'
    %p  evaluate N setting precedence


  * indicates an argument (A) required.

  The '%' may optionally be followed by a number (C) in square brackets, which
  makes the engine walk down to N[C] before evaluating the escape code.
"""

# The below is a bit long, but still it is somehwat abbreviated.
# See https://github.com/rocky/python-uncompyle6/wiki/Table-driven-semantic-actions.
# for a more complete explanation, nicely marked up and with examples.
#
#
# Semantic action rules for nonterminal symbols can be specified here by
# creating a method prefaced with "n_" for that nonterminal.  For
# example, "n_exec_stmt" handles the semantic actions for the
# "exec_stmt" nonterminal symbol. Similarly if a method with the name
# of the nonterminal is suffixed with "_exit" it will be called after
# all of its children are called.
#
# After a while writing methods this way, you'll find many routines which do similar
# sorts of things, and soon you'll find you want a short notation to
# describe rules and not have to create methods at all.
#
# So another other way to specify a semantic rule for a nonterminal is via
# one of the tables MAP_R0, MAP_R, or MAP_DIRECT where the key is the
# nonterminal name.
#
# These dictionaries use a printf-like syntax to direct substitution
# from attributes of the nonterminal and its children..
#
# The rest of the below describes how table-driven semantic actions work
# and gives a list of the format specifiers. The default() and
# template_engine() methods implement most of the below.
#
# We allow for a couple of ways to interact with a node in a tree.  So
# step 1 after not seeing a custom method for a nonterminal is to
# determine from what point of view tree-wise the rule is applied.

# In the diagram below, N is a nonterminal name, and K also a nonterminal
# name but the one used as a key in the table.
# we show where those are with respect to each other in the
# AST tree for N.
#
#
#          N&K               N                  N
#        / | ... \        / | ... \          / | ... \
#       O  O      O      O  O      K        O  O      O
#                                                     |
#                                                     K
#    TABLE_DIRECT        TABLE_R             TABLE_R0
#
# The default table is TABLE_DIRECT mapping By far, most rules used work this way.
# TABLE_R0 is rarely used.
#
# The key K is then extracted from the subtree and used to find one
# of the tables, T listed above.  The result after applying T[K] is
# a format string and arguments (a la printf()) for the formatting
# engine.
#
# Escapes in the format string are:
#
#     %c  evaluate the node recursively. Its argument is a single
#         integer representing a node index.
#
#     %p  like %c but sets the operator precedence.
#         Its argument then is a tuple indicating the node
#         index and the precidence value, an integer.
#
#     %C  evaluate children recursively, with sibling children separated by the
#         given string.  It needs a 3-tuple: a starting node, the maximimum
#         value of an end node, and a string to be inserted between sibling children
#
#     %,  Append ',' if last %C only printed one item. This is mostly for tuples
#         on the LHS of an assignment statement since BUILD_TUPLE_n pretty-prints
#         other tuples. The specifier takes no arguments
#
#     %P  same as %C but sets operator precedence.  Its argument is a 4-tuple:
#         the node low and high indices, the separator, a string the precidence
#         value, an integer.
#
#     %D  Same as `%C` this is for left-recursive lists like kwargs where goes
#         to epsilon at the beginning. It needs a 3-tuple: a starting node, the
#         maximimum value of an end node, and a string to be inserted between
#         sibling children. If we were to use `%C` an extra separator with an
#         epsilon would appear at the beginning.
#
#     %|  Insert spaces to the current indentation level. Takes no arguments.
#
#     %+  increase current indentation level. Takes no arguments.
#
#     %-  decrease current indentation level. Takes no arguments.
#
#     %{...} evaluate ... in context of N
#
#     %%  literal '%'. Takes no arguments.
#
#
#   The '%' may optionally be followed by a number (C) in square
#   brackets, which makes the template_engine walk down to N[C] before
#   evaluating the escape code.

import sys

from uncompyle6 import PYTHON3

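To make the specifier list concrete, here is a toy expansion of one TABLE_DIRECT entry that appears earlier in this diff, 'ifstmt': ( '%|if %c:\n%+%c%-', 0, 1 ). The helper below only mimics what template_engine() does for that single format string; it is an illustration, not the library's code.

# Toy stand-in for what template_engine() does with '%|if %c:\n%+%c%-':
#   %|  -> write the current indent      %c 0 -> render the test expression
#   %+  -> indent one level              %c 1 -> render the body
#   %-  -> dedent back to the old level
def expand_ifstmt(indent, render, node):
    text = indent + "if " + render(node[0]) + ":\n"
    text += indent + "    " + render(node[1]) + "\n"
    return text

print(expand_ifstmt("", lambda n: n, ["a > b", "pass"]))
# if a > b:
#     pass
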
@@ -122,6 +162,29 @@ class SourceWalker(GenericASTTraversal, object):
                 debug_parser=PARSER_DEFAULT_DEBUG,
                 compile_mode='exec', is_pypy=False,
                 linestarts={}):
        """version is the Python version (a float) of the Python dialect
        of both the AST and language we should produce.

        out is IO-like file pointer to where the output should go. It
        whould have a getvalue() method.

        scanner is a method to call when we need to scan tokens. Sometimes
        in producing output we will run across further tokens that need
        to be scaned.

        If showast is True, we print the AST tree.

        compile_mode is is either 'exec' or 'single'. It isthe compile
        mode that was used to create the AST and specifies a gramar variant within
        a Python version to use.

        is_pypy should be True if the AST was generated for PyPy.

        linestarts is a dictionary of line number to bytecode offset. This
        can sometimes assist in determinte which kind of source-code construct
        to use when there is ambiguity.
        """
        GenericASTTraversal.__init__(self, ast=None)
        self.scanner = scanner
        params = {

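The docstring added in this hunk reads like a constructor reference, so a construction sketch may help. Everything below is an assumption pieced together from this hunk alone: the positional order of version/out/scanner and the get_scanner helper are illustrative, not a verified recipe.

# Hedged sketch of wiring a SourceWalker by hand; argument order and the
# get_scanner() helper are assumptions, not documented usage.
from io import StringIO
from uncompyle6.scanner import get_scanner

version = 2.7
walker_out = StringIO()
scanner = get_scanner(version, is_pypy=False)   # tokenizer for that bytecode version
walker = SourceWalker(version, walker_out, scanner,
                      showast=False,
                      compile_mode='exec',      # grammar variant, per the docstring
                      is_pypy=False)
# After the tree walk, walker_out.getvalue() holds the reconstructed source.
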
@@ -304,7 +367,7 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
# MAKE_FUNCTION ..
|
||||
code = node[-3]
|
||||
|
||||
self.indentMore()
|
||||
self.indent_more()
|
||||
for annotate_last in range(len(node)-1, -1, -1):
|
||||
if node[annotate_last] == 'annotate_tuple':
|
||||
break
|
||||
@@ -324,7 +387,7 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
self.write('\n\n')
|
||||
else:
|
||||
self.write('\n\n\n')
|
||||
self.indentLess()
|
||||
self.indent_less()
|
||||
self.prune() # stop recursing
|
||||
self.n_mkfunc_annotate = n_mkfunc_annotate
|
||||
|
||||
@@ -359,8 +422,10 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
node.type == 'call_function'
|
||||
p = self.prec
|
||||
self.prec = 80
|
||||
self.engine(('%c(%P)', 0, (1, -4, ', ', 100)), node)
|
||||
self.template_engine(('%c(%P)', 0,
|
||||
(1, -4, ', ', 100)), node)
|
||||
self.prec = p
|
||||
node.type == 'async_call_function'
|
||||
self.prune()
|
||||
self.n_async_call_function = n_async_call_function
|
||||
self.n_build_list_unpack = self.n_build_list
|
||||
@@ -373,13 +438,13 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
for i in mapping[1:]:
|
||||
key = key[i]
|
||||
pass
|
||||
if key.type.startswith('CALL_FUNCTION_VAR_KW'):
|
||||
if key.kind.startswith('CALL_FUNCTION_VAR_KW'):
|
||||
# Python 3.5 changes the stack position of *args. kwargs come
|
||||
# after *args whereas in earlier Pythons, *args is at the end
|
||||
# which simpilfiies things from our perspective.
|
||||
# Python 3.6+ replaces CALL_FUNCTION_VAR_KW with CALL_FUNCTION_EX
|
||||
# We will just swap the order to make it look like earlier Python 3.
|
||||
entry = table[key.type]
|
||||
entry = table[key.kind]
|
||||
kwarg_pos = entry[2][1]
|
||||
args_pos = kwarg_pos - 1
|
||||
# Put last node[args_pos] after subsequent kwargs
|
||||
@@ -400,9 +465,11 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
is_code = hasattr(code_node, 'attr') and iscode(code_node.attr)
|
||||
if (is_code and
|
||||
(code_node.attr.co_flags & COMPILER_FLAG_BIT['COROUTINE'])):
|
||||
self.engine(('\n\n%|async def %c\n', -2), node)
|
||||
self.template_engine(('\n\n%|async def %c\n',
|
||||
-2), node)
|
||||
else:
|
||||
self.engine(('\n\n%|def %c\n', -2), node)
|
||||
self.template_engine(('\n\n%|def %c\n', -2),
|
||||
node)
|
||||
self.prune()
|
||||
self.n_funcdef = n_funcdef
|
||||
|
||||
@@ -500,10 +567,10 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
super(SourceWalker, self).preorder(node)
|
||||
self.set_pos_info(node)
|
||||
|
||||
def indentMore(self, indent=TAB):
|
||||
def indent_more(self, indent=TAB):
|
||||
self.indent += indent
|
||||
|
||||
def indentLess(self, indent=TAB):
|
||||
def indent_less(self, indent=TAB):
|
||||
self.indent = self.indent[:-len(indent)]
|
||||
|
||||
def traverse(self, node, indent=None, isLambda=False):
|
||||
@@ -578,6 +645,20 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
node == AST('return_stmt',
|
||||
[AST('ret_expr', [NONE]), Token('RETURN_VALUE')]))
|
||||
|
||||
# Python 3.x can have be dead code as a result of its optimization?
|
||||
# So we'll add a # at the end of the return lambda so the rest is ignored
|
||||
def n_return_lambda(self, node):
|
||||
if 1 <= len(node) <= 2:
|
||||
self.preorder(node[0])
|
||||
self.write(' # Avoid dead code: ')
|
||||
self.prune()
|
||||
else:
|
||||
# We can't comment out like above because there may be a trailing ')'
|
||||
# that needs to be written
|
||||
assert len(node) == 3 and node[2] == 'LAMBDA_MARKER'
|
||||
self.preorder(node[0])
|
||||
self.prune()
|
||||
|
||||
def n_return_stmt(self, node):
|
||||
if self.params['isLambda']:
|
||||
self.preorder(node[0])
|
||||
@@ -595,6 +676,7 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
|
||||
def n_return_if_stmt(self, node):
|
||||
if self.params['isLambda']:
|
||||
self.write(' return ')
|
||||
self.preorder(node[0])
|
||||
self.prune()
|
||||
else:
|
||||
@@ -651,12 +733,12 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
|
||||
def n_expr(self, node):
|
||||
p = self.prec
|
||||
if node[0].type.startswith('binary_expr'):
|
||||
if node[0].kind.startswith('binary_expr'):
|
||||
n = node[0][-1][0]
|
||||
else:
|
||||
n = node[0]
|
||||
|
||||
self.prec = PRECEDENCE.get(n.type, -2)
|
||||
self.prec = PRECEDENCE.get(n.kind, -2)
|
||||
if n == 'LOAD_CONST' and repr(n.pattr)[0] == '-':
|
||||
self.prec = 6
|
||||
|
||||
@@ -739,9 +821,9 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
self.prune()
|
||||
|
||||
def n_delete_subscr(self, node):
|
||||
if node[-2][0] == 'build_list' and node[-2][0][-1].type.startswith('BUILD_TUPLE'):
|
||||
if node[-2][0] == 'build_list' and node[-2][0][-1].kind.startswith('BUILD_TUPLE'):
|
||||
if node[-2][0][-1] != 'BUILD_TUPLE_0':
|
||||
node[-2][0].type = 'build_tuple2'
|
||||
node[-2][0].kind = 'build_tuple2'
|
||||
self.default(node)
|
||||
|
||||
n_store_subscr = n_binary_subscr = n_delete_subscr
|
||||
@@ -750,9 +832,9 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
def n_tryfinallystmt(self, node):
|
||||
if len(node[1][0]) == 1 and node[1][0][0] == 'stmt':
|
||||
if node[1][0][0][0] == 'trystmt':
|
||||
node[1][0][0][0].type = 'tf_trystmt'
|
||||
node[1][0][0][0].kind = 'tf_trystmt'
|
||||
if node[1][0][0][0] == 'tryelsestmt':
|
||||
node[1][0][0][0].type = 'tf_tryelsestmt'
|
||||
node[1][0][0][0].kind = 'tf_tryelsestmt'
|
||||
self.default(node)
|
||||
|
||||
def n_exec_stmt(self, node):
|
||||
@@ -777,26 +859,26 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
|
||||
if len(n) == 1 == len(n[0]) and n[0] == '_stmts':
|
||||
n = n[0][0][0]
|
||||
elif n[0].type in ('lastc_stmt', 'lastl_stmt'):
|
||||
elif n[0].kind in ('lastc_stmt', 'lastl_stmt'):
|
||||
n = n[0][0]
|
||||
else:
|
||||
if not preprocess:
|
||||
self.default(node)
|
||||
return
|
||||
|
||||
if n.type in ('ifstmt', 'iflaststmt', 'iflaststmtl'):
|
||||
node.type = 'ifelifstmt'
|
||||
n.type = 'elifstmt'
|
||||
elif n.type in ('ifelsestmtr',):
|
||||
node.type = 'ifelifstmt'
|
||||
n.type = 'elifelsestmtr'
|
||||
elif n.type in ('ifelsestmt', 'ifelsestmtc', 'ifelsestmtl'):
|
||||
node.type = 'ifelifstmt'
|
||||
if n.kind in ('ifstmt', 'iflaststmt', 'iflaststmtl'):
|
||||
node.kind = 'ifelifstmt'
|
||||
n.kind = 'elifstmt'
|
||||
elif n.kind in ('ifelsestmtr',):
|
||||
node.kind = 'ifelifstmt'
|
||||
n.kind = 'elifelsestmtr'
|
||||
elif n.kind in ('ifelsestmt', 'ifelsestmtc', 'ifelsestmtl'):
|
||||
node.kind = 'ifelifstmt'
|
||||
self.n_ifelsestmt(n, preprocess=True)
|
||||
if n == 'ifelifstmt':
|
||||
n.type = 'elifelifstmt'
|
||||
elif n.type in ('ifelsestmt', 'ifelsestmtc', 'ifelsestmtl'):
|
||||
n.type = 'elifelsestmt'
|
||||
n.kind = 'elifelifstmt'
|
||||
elif n.kind in ('ifelsestmt', 'ifelsestmtc', 'ifelsestmtl'):
|
||||
n.kind = 'elifelsestmt'
|
||||
if not preprocess:
|
||||
self.default(node)
|
||||
|
||||
@@ -805,7 +887,7 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
def n_ifelsestmtr(self, node):
|
||||
if node[2] == 'COME_FROM':
|
||||
return_stmts_node = node[3]
|
||||
node.type = 'ifelsestmtr2'
|
||||
node.kind = 'ifelsestmtr2'
|
||||
else:
|
||||
return_stmts_node = node[2]
|
||||
if len(return_stmts_node) != 2:
|
||||
@@ -821,9 +903,9 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
self.write(self.indent, 'if ')
|
||||
self.preorder(node[0])
|
||||
self.println(':')
|
||||
self.indentMore()
|
||||
self.indent_more()
|
||||
self.preorder(node[1])
|
||||
self.indentLess()
|
||||
self.indent_less()
|
||||
|
||||
if_ret_at_end = False
|
||||
if len(return_stmts_node[0]) >= 3:
|
||||
@@ -836,27 +918,27 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
for n in return_stmts_node[0]:
|
||||
if (n[0] == 'ifstmt' and n[0][1][0] == 'return_if_stmts'):
|
||||
if prev_stmt_is_if_ret:
|
||||
n[0].type = 'elifstmt'
|
||||
n[0].kind = 'elifstmt'
|
||||
prev_stmt_is_if_ret = True
|
||||
else:
|
||||
prev_stmt_is_if_ret = False
|
||||
if not past_else and not if_ret_at_end:
|
||||
self.println(self.indent, 'else:')
|
||||
self.indentMore()
|
||||
self.indent_more()
|
||||
past_else = True
|
||||
self.preorder(n)
|
||||
if not past_else or if_ret_at_end:
|
||||
self.println(self.indent, 'else:')
|
||||
self.indentMore()
|
||||
self.indent_more()
|
||||
self.preorder(return_stmts_node[1])
|
||||
self.indentLess()
|
||||
self.indent_less()
|
||||
self.prune()
|
||||
n_ifelsestmtr2 = n_ifelsestmtr
|
||||
|
||||
def n_elifelsestmtr(self, node):
|
||||
if node[2] == 'COME_FROM':
|
||||
return_stmts_node = node[3]
|
||||
node.type = 'elifelsestmtr2'
|
||||
node.kind = 'elifelsestmtr2'
|
||||
else:
|
||||
return_stmts_node = node[2]
|
||||
|
||||
@@ -871,22 +953,22 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
self.write(self.indent, 'elif ')
|
||||
self.preorder(node[0])
|
||||
self.println(':')
|
||||
self.indentMore()
|
||||
self.indent_more()
|
||||
self.preorder(node[1])
|
||||
self.indentLess()
|
||||
self.indent_less()
|
||||
|
||||
for n in return_stmts_node[0]:
|
||||
n[0].type = 'elifstmt'
|
||||
n[0].kind = 'elifstmt'
|
||||
self.preorder(n)
|
||||
self.println(self.indent, 'else:')
|
||||
self.indentMore()
|
||||
self.indent_more()
|
||||
self.preorder(return_stmts_node[1])
|
||||
self.indentLess()
|
||||
self.indent_less()
|
||||
self.prune()
|
||||
|
||||
def n_import_as(self, node):
|
||||
store_node = node[-1][-1]
|
||||
assert store_node.type.startswith('STORE_')
|
||||
assert store_node.kind.startswith('STORE_')
|
||||
iname = node[0].pattr # import name
|
||||
sname = store_node.pattr # store_name
|
||||
if iname and iname == sname or iname.startswith(sname + '.'):
|
||||
@@ -922,14 +1004,14 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
func_name = code_node.attr.co_name
|
||||
self.write(func_name)
|
||||
|
||||
self.indentMore()
|
||||
self.indent_more()
|
||||
self.make_function(node, isLambda=False, codeNode=code_node)
|
||||
|
||||
if len(self.param_stack) > 1:
|
||||
self.write('\n\n')
|
||||
else:
|
||||
self.write('\n\n\n')
|
||||
self.indentLess()
|
||||
self.indent_less()
|
||||
self.prune() # stop recursing
|
||||
|
||||
def make_function(self, node, isLambda, nested=1,
|
||||
@@ -1006,7 +1088,7 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
"""
|
||||
p = self.prec
|
||||
self.prec = 27
|
||||
if node[-1].type == 'list_iter':
|
||||
if node[-1].kind == 'list_iter':
|
||||
n = node[-1]
|
||||
elif self.is_pypy and node[-1] == 'JUMP_BACK':
|
||||
n = node[-2]
|
||||
@@ -1130,7 +1212,7 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
self.write('{')
|
||||
if node[0] in ['LOAD_SETCOMP', 'LOAD_DICTCOMP']:
|
||||
self.comprehension_walk3(node, 1, 0)
|
||||
elif node[0].type == 'load_closure' and self.version >= 3.0:
|
||||
elif node[0].kind == 'load_closure' and self.version >= 3.0:
|
||||
self.setcomprehension_walk3(node, collection_index=4)
|
||||
else:
|
||||
self.comprehension_walk(node, iter_index=4)
|
||||
@@ -1197,7 +1279,7 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
|
||||
# Python 2.7+ starts including set_comp_body
|
||||
# Python 3.5+ starts including setcomp_func
|
||||
assert n.type in ('lc_body', 'comp_body', 'setcomp_func', 'set_comp_body'), ast
|
||||
assert n.kind in ('lc_body', 'comp_body', 'setcomp_func', 'set_comp_body'), ast
|
||||
assert designator, "Couldn't find designator in list/set comprehension"
|
||||
|
||||
self.preorder(n[0])
|
||||
@@ -1247,7 +1329,7 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
n = n[3]
|
||||
elif n in ('list_if', 'list_if_not'):
|
||||
# FIXME: just a guess
|
||||
if n[0].type == 'expr':
|
||||
if n[0].kind == 'expr':
|
||||
list_if = n
|
||||
else:
|
||||
list_if = n[1]
|
||||
@@ -1268,7 +1350,7 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
|
||||
def n_listcomp(self, node):
|
||||
self.write('[')
|
||||
if node[0].type == 'load_closure':
|
||||
if node[0].kind == 'load_closure':
|
||||
self.listcomprehension_walk2(node)
|
||||
else:
|
||||
self.comprehension_walk3(node, 1, 0)
|
||||
@@ -1305,7 +1387,7 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
n = n[3]
|
||||
elif n in ('list_if', 'list_if_not', 'comp_if', 'comp_if_not'):
|
||||
# FIXME: just a guess
|
||||
if n[0].type == 'expr':
|
||||
if n[0].kind == 'expr':
|
||||
list_if = n
|
||||
else:
|
||||
list_if = n[1]
|
||||
@@ -1421,9 +1503,9 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
self.println(':')
|
||||
|
||||
# class body
|
||||
self.indentMore()
|
||||
self.indent_more()
|
||||
self.build_class(subclass_code)
|
||||
self.indentLess()
|
||||
self.indent_less()
|
||||
|
||||
self.currentclass = cclass
|
||||
if len(self.param_stack) > 1:
|
||||
@@ -1457,10 +1539,10 @@ class SourceWalker(GenericASTTraversal, object):

def print_super_classes3(self, node):
n = len(node)-1
if node.type != 'expr':
assert node[n].type.startswith('CALL_FUNCTION')
if node.kind != 'expr':
assert node[n].kind.startswith('CALL_FUNCTION')
for i in range(n-2, 0, -1):
if not node[i].type in ['expr', 'LOAD_CLASSNAME']:
if not node[i].kind in ['expr', 'LOAD_CLASSNAME']:
break
pass
@@ -1494,13 +1576,13 @@ class SourceWalker(GenericASTTraversal, object):
p = self.prec
self.prec = 100

self.indentMore(INDENT_PER_LEVEL)
self.indent_more(INDENT_PER_LEVEL)
sep = INDENT_PER_LEVEL[:-1]
self.write('{')
line_number = self.line_number

if self.version >= 3.0 and not self.is_pypy:
if node[0].type.startswith('kvlist'):
if node[0].kind.startswith('kvlist'):
# Python 3.5+ style key/value list in mapexpr
kv_node = node[0]
l = list(kv_node)
@@ -1523,11 +1605,11 @@ class SourceWalker(GenericASTTraversal, object):
i += 2
pass
pass
elif len(node) > 1 and node[1].type.startswith('kvlist'):
elif len(node) > 1 and node[1].kind.startswith('kvlist'):
# Python 3.0..3.4 style key/value list in mapexpr
kv_node = node[1]
l = list(kv_node)
if len(l) > 0 and l[0].type == 'kv3':
if len(l) > 0 and l[0].kind == 'kv3':
# Python 3.2 does this
kv_node = node[1][0]
l = list(kv_node)
@@ -1552,7 +1634,7 @@ class SourceWalker(GenericASTTraversal, object):
i += 3
pass
pass
elif node[-1].type.startswith('BUILD_CONST_KEY_MAP'):
elif node[-1].kind.startswith('BUILD_CONST_KEY_MAP'):
# Python 3.6+ style const map
keys = node[-2].pattr
values = node[:-2]
@@ -1577,7 +1659,7 @@ class SourceWalker(GenericASTTraversal, object):
pass
else:
# Python 2 style kvlist
assert node[-1].type.startswith('kvlist')
assert node[-1].kind.startswith('kvlist')
kv_node = node[-1] # goto kvlist

first_time = True
@@ -1632,7 +1714,7 @@ class SourceWalker(GenericASTTraversal, object):
if sep.startswith(",\n"):
self.write(sep[1:])
self.write('}')
self.indentLess(INDENT_PER_LEVEL)
self.indent_less(INDENT_PER_LEVEL)
self.prec = p
self.prune()
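Aside: the version-specific branches above track how CPython has compiled dict displays over the years, and a quick way to see what the running interpreter emits is to disassemble a literal yourself. The snippet below is only an illustration (make_map is a local example function); the opcode names in the comment are what CPython is generally expected to produce, with BUILD_CONST_KEY_MAP appearing only on 3.6+ and earlier releases building the map pair by pair.

    # Sketch: see which dict-building opcodes your interpreter produces.
    import dis

    def make_map():
        return {'a': 1, 'b': 2}

    # On CPython 3.6+ expect LOAD_CONST 1, LOAD_CONST 2, LOAD_CONST ('a', 'b'),
    # BUILD_CONST_KEY_MAP 2; older releases build the map pair by pair instead.
    dis.dis(make_map)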
@@ -1643,7 +1725,7 @@ class SourceWalker(GenericASTTraversal, object):
p = self.prec
self.prec = 100
lastnode = node.pop()
lastnodetype = lastnode.type
lastnodetype = lastnode.kind

# If this build list is inside a CALL_FUNCTION_VAR,
# then the first * has already been printed.
@@ -1683,7 +1765,7 @@ class SourceWalker(GenericASTTraversal, object):
else:
flat_elems.append(elem)

self.indentMore(INDENT_PER_LEVEL)
self.indent_more(INDENT_PER_LEVEL)
sep = ''

for elem in flat_elems:
@@ -1708,12 +1790,12 @@ class SourceWalker(GenericASTTraversal, object):
if lastnode.attr == 1 and lastnodetype.startswith('BUILD_TUPLE'):
self.write(',')
self.write(endchar)
self.indentLess(INDENT_PER_LEVEL)
self.indent_less(INDENT_PER_LEVEL)
self.prec = p
self.prune()

def n_unpack(self, node):
if node[0].type.startswith('UNPACK_EX'):
if node[0].kind.startswith('UNPACK_EX'):
# Python 3+
before_count, after_count = node[0].attr
for i in range(before_count+1):
@@ -1728,8 +1810,8 @@ class SourceWalker(GenericASTTraversal, object):
self.prune()
return
for n in node[1:]:
if n[0].type == 'unpack':
n[0].type = 'unpack_w_parens'
if n[0].kind == 'unpack':
n[0].kind = 'unpack_w_parens'
self.default(node)

n_unpack_w_parens = n_unpack
@@ -1738,33 +1820,34 @@ class SourceWalker(GenericASTTraversal, object):
# A horrible hack for Python 3.0 .. 3.2
if 3.0 <= self.version <= 3.2 and len(node) == 2:
if (node[0][0] == 'LOAD_FAST' and node[0][0].pattr == '__locals__' and
node[1][0].type == 'STORE_LOCALS'):
node[1][0].kind == 'STORE_LOCALS'):
self.prune()
self.default(node)

def n_assign2(self, node):
for n in node[-2:]:
if n[0] == 'unpack':
n[0].type = 'unpack_w_parens'
n[0].kind = 'unpack_w_parens'
self.default(node)

def n_assign3(self, node):
for n in node[-3:]:
if n[0] == 'unpack':
n[0].type = 'unpack_w_parens'
n[0].kind = 'unpack_w_parens'
self.default(node)

def n_except_cond2(self, node):
if node[-2][0] == 'unpack':
node[-2][0].type = 'unpack_w_parens'
node[-2][0].kind = 'unpack_w_parens'
self.default(node)

def engine(self, entry, startnode):
def template_engine(self, entry, startnode):
"""The format template interpetation engine. See the comment at the
beginning of this module for the how we interpret format specifications such as
%c, %C, and so on.
beginning of this module for the how we interpret format
specifications such as %c, %C, and so on.
"""
# self.println("----> ", startnode.type, ', ', entry[0])

# self.println("----> ", startnode.kind, ', ', entry[0])
fmt = entry[0]
arg = 1
i = 0
@@ -1782,24 +1865,23 @@ class SourceWalker(GenericASTTraversal, object):
if typ == '%': self.write('%')
elif typ == '+':
self.line_number += 1
self.indentMore()
self.indent_more()
elif typ == '-':
self.line_number += 1
self.indentLess()
self.indent_less()
elif typ == '|':
self.line_number += 1
self.write(self.indent)
# Used mostly on the LHS of an assignment
# BUILD_TUPLE_n is pretty printed and may take care of other uses.
elif typ == ',':
if (node.type in ('unpack', 'unpack_w_parens') and
if (node.kind in ('unpack', 'unpack_w_parens') and
node[0].attr == 1):
self.write(',')
elif typ == 'c':
if isinstance(entry[arg], int):
entry_node = node[entry[arg]]
self.preorder(entry_node)
arg += 1
entry_node = node[entry[arg]]
self.preorder(entry_node)
arg += 1
elif typ == 'p':
p = self.prec
(index, self.prec) = entry[arg]
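Aside: the %-specifiers dispatched above come from the semantic action tables, where an entry pairs a format string with node indices: %c recursively expands one child, %| emits the current indentation, and %+ / %- raise or lower the indent level. The following is a hypothetical, stripped-down sketch of just that dispatch; the render helper and its arguments are made up for illustration, and the real engine walks syntax-tree nodes rather than plain strings.

    # Minimal sketch of interpreting a table entry such as ('%|return %c\n', 0).
    def render(entry, children, indent='    ', level=0):
        fmt, args = entry[0], list(entry[1:])
        out, i = [], 0
        while i < len(fmt):
            ch = fmt[i]
            if ch == '%':
                i += 1
                typ = fmt[i]
                if typ == '%':
                    out.append('%')                          # literal percent
                elif typ == '|':
                    out.append(indent * level)               # current indentation
                elif typ == '+':
                    level += 1                               # indent one level
                elif typ == '-':
                    level -= 1                               # dedent one level
                elif typ == 'c':
                    out.append(str(children[args.pop(0)]))   # expand one child
            else:
                out.append(ch)
            i += 1
        return ''.join(out)

    print(render(('%|return %c\n', 0), ['x + 1'], level=1), end='')
    # prints "    return x + 1" as one indented line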
@@ -1865,8 +1947,8 @@ class SourceWalker(GenericASTTraversal, object):
key = key[i]
pass

if key.type in table:
self.engine(table[key.type], node)
if key.kind in table:
self.template_engine(table[key.kind], node)
self.prune()

def customize(self, customize):
@@ -1890,7 +1972,7 @@ class SourceWalker(GenericASTTraversal, object):
'CALL_FUNCTION_VAR_KW', 'CALL_FUNCTION_KW'):
if v == 0:
str = '%c(%C' # '%C' is a dummy here ...
p2 = (0, 0, None) # .. because of this
p2 = (0, 0, None) # .. because of the None in this
else:
str = '%c(%C, '
p2 = (1, -2, ', ')
@@ -2085,6 +2167,11 @@ class SourceWalker(GenericASTTraversal, object):
# assert isinstance(tokens[0], Token)

if isLambda:
for t in tokens:
if t.kind == 'RETURN_END_IF':
t.kind = 'RETURN_END_IF_LAMBDA'
elif t.kind == 'RETURN_VALUE':
t.kind = 'RETURN_VALUE_LAMBDA'
tokens.append(Token('LAMBDA_MARKER'))
try:
ast = python_parser.parse(self.p, tokens, customize)
@@ -2101,10 +2188,10 @@ class SourceWalker(GenericASTTraversal, object):
# than fight (with the grammar to not emit "return None").
if self.hide_internal:
if len(tokens) >= 2 and not noneInNames:
if tokens[-1].type == 'RETURN_VALUE':
if tokens[-1].kind in ('RETURN_VALUE', 'RETURN_VALUE_LAMBDA'):
# Python 3.4's classes can add a "return None" which is
# invalid syntax.
if tokens[-2].type == 'LOAD_CONST':
if tokens[-2].kind == 'LOAD_CONST':
if isTopLevel or tokens[-2].pattr is None:
del tokens[-2:]
else:
@@ -43,7 +43,7 @@ BIN_OP_FUNCS = {
'BINARY_OR': operator.or_,
}

JUMP_OPs = None
JUMP_OPS = None

# --- exceptions ---

@@ -225,8 +225,8 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2,
import uncompyle6.scanners.scanner36 as scan
scanner = scan.Scanner36()

global JUMP_OPs
JUMP_OPs = list(scan.JUMP_OPs) + ['JUMP_BACK']
global JUMP_OPS
JUMP_OPS = list(scan.JUMP_OPS) + ['JUMP_BACK']

# use changed Token class
# We (re)set this here to save exception handling,
@@ -242,18 +242,18 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2,
scanner.resetTokenClass() # restore Token class

targets1 = dis.findlabels(code_obj1.co_code)
tokens1 = [t for t in tokens1 if t.type != 'COME_FROM']
tokens2 = [t for t in tokens2 if t.type != 'COME_FROM']
tokens1 = [t for t in tokens1 if t.kind != 'COME_FROM']
tokens2 = [t for t in tokens2 if t.kind != 'COME_FROM']

i1 = 0; i2 = 0
offset_map = {}; check_jumps = {}
while i1 < len(tokens1):
if i2 >= len(tokens2):
if len(tokens1) == len(tokens2) + 2 \
and tokens1[-1].type == 'RETURN_VALUE' \
and tokens1[-2].type == 'LOAD_CONST' \
and tokens1[-1].kind == 'RETURN_VALUE' \
and tokens1[-2].kind == 'LOAD_CONST' \
and tokens1[-2].pattr is None \
and tokens1[-3].type == 'RETURN_VALUE':
and tokens1[-3].kind == 'RETURN_VALUE':
break
else:
raise CmpErrorCodeLen(name, tokens1, tokens2)
@@ -265,13 +265,13 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2,
raise CmpErrorCode(name, tokens1[idx1].offset, tokens1[idx1],
tokens2[idx2], tokens1, tokens2)

if tokens1[i1].type != tokens2[i2].type:
if tokens1[i1].type == 'LOAD_CONST' == tokens2[i2].type:
if tokens1[i1].kind != tokens2[i2].kind:
if tokens1[i1].kind == 'LOAD_CONST' == tokens2[i2].kind:
i = 1
while tokens1[i1+i].type == 'LOAD_CONST':
while tokens1[i1+i].kind == 'LOAD_CONST':
i += 1
if tokens1[i1+i].type.startswith(('BUILD_TUPLE', 'BUILD_LIST')) \
and i == int(tokens1[i1+i].type.split('_')[-1]):
if tokens1[i1+i].kind.startswith(('BUILD_TUPLE', 'BUILD_LIST')) \
and i == int(tokens1[i1+i].kind.split('_')[-1]):
t = tuple([ elem.pattr for elem in tokens1[i1:i1+i] ])
if t != tokens2[i2].pattr:
raise CmpErrorCode(name, tokens1[i1].offset, tokens1[i1],
@@ -279,60 +279,60 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2,
i1 += i + 1
i2 += 1
continue
elif i == 2 and tokens1[i1+i].type == 'ROT_TWO' and tokens2[i2+1].type == 'UNPACK_SEQUENCE_2':
elif i == 2 and tokens1[i1+i].kind == 'ROT_TWO' and tokens2[i2+1].kind == 'UNPACK_SEQUENCE_2':
i1 += 3
i2 += 2
continue
elif i == 2 and tokens1[i1+i].type in BIN_OP_FUNCS:
f = BIN_OP_FUNCS[tokens1[i1+i].type]
elif i == 2 and tokens1[i1+i].kind in BIN_OP_FUNCS:
f = BIN_OP_FUNCS[tokens1[i1+i].kind]
if f(tokens1[i1].pattr, tokens1[i1+1].pattr) == tokens2[i2].pattr:
i1 += 3
i2 += 1
continue
elif tokens1[i1].type == 'UNARY_NOT':
if tokens2[i2].type == 'POP_JUMP_IF_TRUE':
if tokens1[i1+1].type == 'POP_JUMP_IF_FALSE':
elif tokens1[i1].kind == 'UNARY_NOT':
if tokens2[i2].kind == 'POP_JUMP_IF_TRUE':
if tokens1[i1+1].kind == 'POP_JUMP_IF_FALSE':
i1 += 2
i2 += 1
continue
elif tokens2[i2].type == 'POP_JUMP_IF_FALSE':
if tokens1[i1+1].type == 'POP_JUMP_IF_TRUE':
elif tokens2[i2].kind == 'POP_JUMP_IF_FALSE':
if tokens1[i1+1].kind == 'POP_JUMP_IF_TRUE':
i1 += 2
i2 += 1
continue
elif tokens1[i1].type in ('JUMP_FORWARD', 'JUMP_BACK') \
and tokens1[i1-1].type == 'RETURN_VALUE' \
and tokens2[i2-1].type in ('RETURN_VALUE', 'RETURN_END_IF') \
elif tokens1[i1].kind in ('JUMP_FORWARD', 'JUMP_BACK') \
and tokens1[i1-1].kind == 'RETURN_VALUE' \
and tokens2[i2-1].kind in ('RETURN_VALUE', 'RETURN_END_IF') \
and int(tokens1[i1].offset) not in targets1:
i1 += 1
continue
elif tokens1[i1].type == 'JUMP_FORWARD' and tokens2[i2].type == 'JUMP_BACK' \
and tokens1[i1+1].type == 'JUMP_BACK' and tokens2[i2+1].type == 'JUMP_BACK' \
elif tokens1[i1].kind == 'JUMP_FORWARD' and tokens2[i2].kind == 'JUMP_BACK' \
and tokens1[i1+1].kind == 'JUMP_BACK' and tokens2[i2+1].kind == 'JUMP_BACK' \
and int(tokens1[i1].pattr) == int(tokens1[i1].offset) + 3:
if int(tokens1[i1].pattr) == int(tokens1[i1+1].offset):
i1 += 2
i2 += 2
continue
elif tokens1[i1].type == 'LOAD_NAME' and tokens2[i2].type == 'LOAD_CONST' \
elif tokens1[i1].kind == 'LOAD_NAME' and tokens2[i2].kind == 'LOAD_CONST' \
and tokens1[i1].pattr == 'None' and tokens2[i2].pattr is None:
pass
elif tokens1[i1].type == 'LOAD_GLOBAL' and tokens2[i2].type == 'LOAD_NAME' \
elif tokens1[i1].kind == 'LOAD_GLOBAL' and tokens2[i2].kind == 'LOAD_NAME' \
and tokens1[i1].pattr == tokens2[i2].pattr:
pass
elif tokens1[i1].type == 'LOAD_ASSERT' and tokens2[i2].type == 'LOAD_NAME' \
elif tokens1[i1].kind == 'LOAD_ASSERT' and tokens2[i2].kind == 'LOAD_NAME' \
and tokens1[i1].pattr == tokens2[i2].pattr:
pass
elif (tokens1[i1].type == 'RETURN_VALUE' and
tokens2[i2].type == 'RETURN_END_IF'):
elif (tokens1[i1].kind == 'RETURN_VALUE' and
tokens2[i2].kind == 'RETURN_END_IF'):
pass
elif (tokens1[i1].type == 'BUILD_TUPLE_0' and
elif (tokens1[i1].kind == 'BUILD_TUPLE_0' and
tokens2[i2].pattr == ()):
pass
else:
raise CmpErrorCode(name, tokens1[i1].offset, tokens1[i1],
tokens2[i2], tokens1, tokens2)
elif tokens1[i1].type in JUMP_OPs and tokens1[i1].pattr != tokens2[i2].pattr:
if tokens1[i1].type == 'JUMP_BACK':
elif tokens1[i1].kind in JUMP_OPS and tokens1[i1].pattr != tokens2[i2].pattr:
if tokens1[i1].kind == 'JUMP_BACK':
dest1 = int(tokens1[i1].pattr)
dest2 = int(tokens2[i2].pattr)
if offset_map[dest1] != dest2:
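Aside: the BIN_OP_FUNCS branch above compensates for constant folding. One token stream may still carry two LOAD_CONST operands followed by a binary opcode while the other carries the already-folded constant, so the comparison applies the matching `operator` function to the operands and checks the result against the folded value. A small, simplified illustration of the idea (plain values rather than the real Token objects):

    # Sketch: LOAD_CONST 2, LOAD_CONST 3, BINARY_ADD on one side should match
    # a folded LOAD_CONST 5 on the other side.
    import operator

    BIN_OP_FUNCS = {'BINARY_ADD': operator.add, 'BINARY_MULTIPLY': operator.mul}

    def folded_equivalent(op_name, left, right, folded_value):
        f = BIN_OP_FUNCS.get(op_name)
        return f is not None and f(left, right) == folded_value

    print(folded_equivalent('BINARY_ADD', 2, 3, 5))        # True
    print(folded_equivalent('BINARY_MULTIPLY', 2, 3, 5))   # False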
@@ -387,28 +387,28 @@ def cmp_code_objects(version, is_pypy, code_obj1, code_obj2,
class Token(scanner.Token):
"""Token class with changed semantics for 'cmp()'."""
def __cmp__(self, o):
t = self.type # shortcut
if t == 'BUILD_TUPLE_0' and o.type == 'LOAD_CONST' and o.pattr == ():
t = self.kind # shortcut
if t == 'BUILD_TUPLE_0' and o.kind == 'LOAD_CONST' and o.pattr == ():
return 0
if t == 'COME_FROM' == o.type:
if t == 'COME_FROM' == o.kind:
return 0
if t == 'PRINT_ITEM_CONT' and o.type == 'PRINT_ITEM':
if t == 'PRINT_ITEM_CONT' and o.kind == 'PRINT_ITEM':
return 0
if t == 'RETURN_VALUE' and o.type == 'RETURN_END_IF':
if t == 'RETURN_VALUE' and o.kind == 'RETURN_END_IF':
return 0
if t == 'JUMP_IF_FALSE_OR_POP' and o.type == 'POP_JUMP_IF_FALSE':
if t == 'JUMP_IF_FALSE_OR_POP' and o.kind == 'POP_JUMP_IF_FALSE':
return 0
if JUMP_OPs and t in JUMP_OPs:
if JUMP_OPS and t in JUMP_OPS:
# ignore offset
return t == o.type
return (t == o.type) or self.pattr == o.pattr
return t == o.kind
return (t == o.kind) or self.pattr == o.pattr

def __repr__(self):
return '%s %s (%s)' % (str(self.type), str(self.attr),
return '%s %s (%s)' % (str(self.kind), str(self.attr),
repr(self.pattr))

def __str__(self):
return '%s\t%-17s %r' % (self.offset, self.type, self.pattr)
return '%s\t%-17s %r' % (self.offset, self.kind, self.pattr)

def compare_code_with_srcfile(pyc_filename, src_filename, weak_verify=False):
"""Compare a .pyc with a source code file."""
@@ -442,4 +442,4 @@ if __name__ == '__main__':
t2 = Token('LOAD_CONST', -421, 'code_object _expandLang', 55)
print(repr(t1))
print(repr(t2))
print(t1.type == t2.type, t1.attr == t2.attr)
print(t1.kind == t2.kind, t1.attr == t2.attr)
@@ -1,3 +1,3 @@
# This file is suitable for sourcing inside bash as
# well as importing into Python
VERSION='2.11.5'
VERSION='2.13.0'