Merge branch 'master' into python-2.4
@@ -1,5 +1,5 @@
import pytest
from uncompyle6.semantics.fragments import deparse_code as deparse, deparsed_find
from uncompyle6.semantics.fragments import code_deparse as deparse, deparsed_find
from uncompyle6 import PYTHON_VERSION, PYTHON3

def map_stmts(x, y):
@@ -31,12 +31,13 @@ def list_comp():

def get_parsed_for_fn(fn):
code = fn.func_code
return deparse(PYTHON_VERSION, code)
return deparse(code, version=PYTHON_VERSION)

def check_expect(expect, parsed, fn_name):
debug = False
i = 2
max_expect = len(expect)
code = get_parsed_for_fn(fn_name)
for name, offset in sorted(parsed.offsets.keys()):
assert i+1 <= max_expect, (
"%s: ran out of items in testing node" % fn_name)
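
The hunk above tracks the fragments API rename: the old version-first deparse_code(version, code) call becomes a code-first code_deparse(code, version=...) call. A minimal sketch of the new calling convention, mirroring the updated test; the func_code/__code__ fallback is an assumption for running the same helper under Python 3:

    from uncompyle6 import PYTHON_VERSION
    from uncompyle6.semantics.fragments import code_deparse as deparse

    def get_parsed_for_fn(fn):
        # Python 2 exposes the code object as func_code, Python 3 as __code__.
        code = getattr(fn, 'func_code', None) or fn.__code__
        # New style: the code object comes first, the version is a keyword.
        return deparse(code, version=PYTHON_VERSION)
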
@@ -66,9 +66,9 @@ def test_grammar():
expect_dup_rhs = frozenset([('COME_FROM',), ('CONTINUE',), ('JUMP_ABSOLUTE',),
('LOAD_CONST',),
('JUMP_BACK',), ('JUMP_FORWARD',)])
# reduced_dup_rhs = {k: dup_rhs[k] for k in dup_rhs if k not in expect_dup_rhs}
# for k in reduced_dup_rhs:
# print(k, reduced_dup_rhs[k])
reduced_dup_rhs = dict((k, dup_rhs[k]) for k in dup_rhs if k not in expect_dup_rhs)
for k in reduced_dup_rhs:
print(k, reduced_dup_rhs[k])
# assert not reduced_dup_rhs, reduced_dup_rhs

s = get_scanner(PYTHON_VERSION, IS_PYPY)
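
The dict comprehension in test_grammar is rewritten as dict() over a generator expression, presumably so the file still parses on the older interpreters this python-2.4 branch targets; both spellings build the same mapping. A small self-contained check (sample data made up for illustration):

    dup_rhs = {('LOAD_CONST',): 3, ('JUMP_BACK',): 2, ('POP_TOP',): 1}
    expect_dup_rhs = frozenset([('LOAD_CONST',), ('JUMP_BACK',)])

    # The comprehension form needs Python 2.7+.
    new_style = {k: dup_rhs[k] for k in dup_rhs if k not in expect_dup_rhs}
    # The dict()-over-a-generator form works on older Pythons as well.
    old_style = dict((k, dup_rhs[k]) for k in dup_rhs if k not in expect_dup_rhs)
    assert new_style == old_style == {('POP_TOP',): 1}
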
23 pytest/test_token.py (new file)
@@ -0,0 +1,23 @@
from uncompyle6.scanners.tok import Token

def test_token():
# Test token formatting of: LOAD_CONST None
t = Token('LOAD_CONST', offset=0, attr=None, pattr=None, has_arg=True)
expect = ' 0 LOAD_CONST None'
# print(t.format())
assert t
assert t.format() == expect

# Make sure equality testing of tokens ignores offset
t2 = Token('LOAD_CONST', offset=2, attr=None, pattr=None, has_arg=True)
assert t2 == t


# Make sure formatting of: LOAD_CONST False. We assume False is the 0th index
# of co_consts.
t = Token('LOAD_CONST', offset=1, attr=False, pattr=False, has_arg=True)
expect = ' 1 LOAD_CONST 0 False'
assert t.format() == expect

if __name__ == '__main__':
test_token()
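
The second assertion in the new test depends on token equality ignoring the offset. A minimal sketch of offset-insensitive equality, assuming comparison is on the kind and pattr attributes as the Token.__eq__ docstring later in this commit states:

    class TokenLike(object):
        def __init__(self, kind, pattr=None, offset=-1):
            self.kind, self.pattr, self.offset = kind, pattr, offset
        def __eq__(self, other):
            # offset is deliberately ignored, so the same instruction at
            # different byte offsets still compares equal
            return self.kind == other.kind and self.pattr == other.pattr

    assert TokenLike('LOAD_CONST', offset=0) == TokenLike('LOAD_CONST', offset=2)
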
2 pytest/testdata/if-2.7.right (vendored)
@@ -8,5 +8,5 @@
9 STORE_NAME 2 'b'
12 JUMP_FORWARD 0 'to 15'
15_0 COME_FROM 12 '12'
15 LOAD_CONST 0 ''
15 LOAD_CONST 0 None
18 RETURN_VALUE
2 pytest/testdata/ifelse-2.7.right (vendored)
@@ -11,5 +11,5 @@
6 15 LOAD_CONST 1 2
18 STORE_NAME 2 'd'
21_0 COME_FROM 12 '12'
21 LOAD_CONST 2 ''
21 LOAD_CONST 2 None
24 RETURN_VALUE
BIN test/bytecode_3.6/06_listcomp_nest.pyc (new file; binary file not shown)
@@ -1,3 +1,8 @@
# From 3.6 base64.py. Bug was handling *, and keyword args
def a85decode(b, *, foldspaces=False, adobe=False, ignorechars=b' \t\n\r\v'):
return

# From 3.6 configparser.py. Same problem as above.
_UNSET = object()
def get(self, section, option, *, raw=False, vars=None, fallback=_UNSET):
return
24 test/simple_source/bug36/06_listcomp_nest.py (new file)
@@ -0,0 +1,24 @@
# From 3.6 _sitebuiltins.py
# Bug was in handling double nested kinds of things like:
# for a in b for c in d

# This required grammar modification and
# semantic action changes. LOAD_CLOSUREs are stored
# inside a MAKE_TUPLE.

# FIXME: test and try additional "if" clauses.
def __init__(self, path, name, files=(), dirs=(), volumes=()):
f = [path.join(dir, filename)
for dir in dirs
for filename in files]
f2 = [path.join(drive, dir, filename)
for dir in dirs
for filename in files
for drive in volumes]
return f, f2

# From 3.6 codeop. The below listcomp is generated still
# like it was in 3.5
import __future__
_features = [getattr(__future__, fname)
for fname in __future__.all_feature_names]
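
For reference while reading the new test file: the doubly-nested "for a in b for c in d" form it exercises is just the flattened spelling of two nested loops, as this sketch with made-up data shows:

    dirs, files = ['lib', 'bin'], ['a.py', 'b.py']
    flat = [(d, f) for d in dirs for f in files]

    expanded = []
    for d in dirs:
        for f in files:
            expanded.append((d, f))
    assert flat == expanded
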
@@ -886,8 +886,15 @@ class Python3Parser(PythonParser):
"GET_ITER CALL_FUNCTION_1" % ('pos_arg '* args_pos, opname))
self.add_make_function_rule(rule_pat, opname, token.attr, customize)
if is_pypy or (i >= 2 and tokens[i-2] == 'LOAD_LISTCOMP'):
if self.version >= 3.6:
# 3.6+ sometimes bundles all of the
# 'exprs' in the rule above into a
# tuple.
rule_pat = ("listcomp ::= load_closure LOAD_LISTCOMP %%s%s "
"expr GET_ITER CALL_FUNCTION_1" % (opname,))
self.add_make_function_rule(rule_pat, opname, token.attr, customize)
rule_pat = ("listcomp ::= %sLOAD_LISTCOMP %%s%s expr "
"GET_ITER CALL_FUNCTION_1" % ('expr ' * args_pos, opname))
"GET_ITER CALL_FUNCTION_1" % ('expr ' * args_pos, opname))
self.add_make_function_rule(rule_pat, opname, token.attr, customize)

if is_pypy or (i >= 2 and tokens[i-2] == 'LOAD_LAMBDA'):
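
The parser customization above assembles grammar rules as format strings: a run of 'expr ' items and the customized opcode name are substituted now, while the escaped %%s is left for a later substitution. A string-only sketch with assumed example values (args_pos and opname are not taken from a real run):

    args_pos, opname = 1, 'MAKE_FUNCTION_8'
    rule_pat = ("listcomp ::= %sLOAD_LISTCOMP %%s%s expr "
                "GET_ITER CALL_FUNCTION_1" % ('expr ' * args_pos, opname))
    print(rule_pat)
    # listcomp ::= expr LOAD_LISTCOMP %sMAKE_FUNCTION_8 expr GET_ITER CALL_FUNCTION_1
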
@@ -43,7 +43,9 @@ else:
from array import array

from xdis.code import iscode
from xdis.bytecode import Bytecode, op_has_argument, instruction_size
from xdis.bytecode import (
Bytecode, op_has_argument, instruction_size,
_get_const_info)
from xdis.util import code2num

from uncompyle6.scanner import Scanner
@@ -237,7 +239,13 @@ class Scanner2(Scanner):
# (id(const), const.co_filename, const.co_name)
pattr = '<code_object ' + const.co_name + '>'
else:
if oparg < len(co.co_consts):
argval, _ = _get_const_info(oparg, co.co_consts)
# Why don't we use _ above for "pattr" rather than "const"?
# This *is* a little hoaky, but we have to coordinate with
# other parts like n_LOAD_CONST in pysource.py for example.
pattr = const
pass
elif op in self.opc.NAME_OPS:
pattr = names[oparg]
elif op in self.opc.JREL_OPS:
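
The new argval, _ = _get_const_info(oparg, co.co_consts) line resolves an oparg into the constant's value and its printable form; the helper comes from xdis.bytecode, imported above. The stand-alone sketch below only mirrors what that lookup amounts to and does not use the private helper itself:

    def const_info(oparg, co_consts):
        # index into co_consts, keep the value and a printable form of it
        argval = co_consts[oparg]
        return argval, repr(argval)

    def f():
        return 42

    consts = f.__code__.co_consts          # typically (None, 42)
    argval, argrepr = const_info(consts.index(42), consts)
    assert argval == 42 and argrepr == '42'
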
@@ -1,6 +1,19 @@
# Copyright (c) 2015-2017 by Rocky Bernstein
# Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Python 2.6 bytecode scanner
@@ -20,6 +33,8 @@ from uncompyle6.scanner import L65536
# bytecode verification, verify(), uses JUMP_OPs from here
from xdis.opcodes import opcode_26
from xdis.bytecode import Bytecode
from xdis.bytecode import _get_const_info

JUMP_OPS = opcode_26.JUMP_OPS

class Scanner26(scan.Scanner2):
@@ -212,7 +227,13 @@ class Scanner26(scan.Scanner2):
# (id(const), const.co_filename, const.co_name)
pattr = '<code_object ' + const.co_name + '>'
else:
if oparg < len(co.co_consts):
argval, _ = _get_const_info(oparg, co.co_consts)
# Why don't we use _ above for "pattr" rather than "const"?
# This *is* a little hoaky, but we have to coordinate with
# other parts like n_LOAD_CONST in pysource.py for example.
pattr = const
pass
elif op in self.opc.NAME_OPS:
pattr = names[oparg]
elif op in self.opc.JREL_OPS:
@@ -14,7 +14,6 @@
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""
Python 3 Generic bytecode scanner/deparser
@@ -44,7 +43,7 @@ else:
from array import array

from xdis.code import iscode
from xdis.bytecode import Bytecode, instruction_size
from xdis.bytecode import Bytecode, instruction_size, _get_const_info

from uncompyle6.scanner import Token, parse_fn_counts
import xdis
@@ -346,6 +345,11 @@ class Scanner3(Scanner):
# (id(const), const.co_filename, const.co_name)
pattr = '<code_object ' + const.co_name + '>'
else:
if isinstance(inst.arg, int) and inst.arg < len(co.co_consts):
argval, _ = _get_const_info(inst.arg, co.co_consts)
# Why don't we use _ above for "pattr" rather than "const"?
# This *is* a little hoaky, but we have to coordinate with
# other parts like n_LOAD_CONST in pysource.py for example.
pattr = const
pass
elif opname in ('MAKE_FUNCTION', 'MAKE_CLOSURE'):
@@ -356,11 +360,6 @@ class Scanner3(Scanner):
attr = []
for flag in self.MAKE_FUNCTION_FLAGS:
bit = flags & 1
if bit:
if pattr:
pattr += ", " + flag
else:
pattr += flag
attr.append(bit)
flags >>= 1
attr = attr[:4] # remove last value: attr[5] == False
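
The loop above peels MAKE_FUNCTION flag bits off one at a time, collecting a bit per flag name and a readable pattr such as "default, closure". A stand-alone sketch of the same bit-peeling; the flag names here follow CPython 3.6's MAKE_FUNCTION argument and are only assumed to match the scanner's MAKE_FUNCTION_FLAGS:

    FLAG_NAMES = ('default', 'keyword-only', 'annotation', 'closure')

    def decode_make_function_arg(flags):
        bits, pattr = [], ''
        for flag in FLAG_NAMES:
            bit = flags & 1
            if bit:
                pattr = pattr + ', ' + flag if pattr else flag
            bits.append(bit)
            flags >>= 1
        return bits, pattr

    # 0x09 == positional defaults present + closure present
    assert decode_make_function_arg(0x09) == ([1, 0, 0, 1], 'default, closure')
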
@@ -21,7 +21,7 @@ from uncompyle6 import PYTHON3
if PYTHON3:
intern = sys.intern

class Token:
class Token():
"""
Class representing a byte-code instruction.
@@ -35,7 +35,6 @@ class Token:
def __init__(self, opname, attr=None, pattr=None, offset=-1,
linestart=None, op=None, has_arg=None, opc=None):
self.kind = intern(opname)
self.op = op
self.has_arg = has_arg
self.attr = attr
self.pattr = pattr
@@ -44,7 +43,16 @@ class Token:
if has_arg is False:
self.attr = None
self.pattr = None
self.opc = opc

if opc is None:
from xdis.std import _std_api
self.opc = _std_api.opc
else:
self.opc = opc
if op is None:
self.op = self.opc.opmap.get(self.kind, None)
else:
self.op = op

def __eq__(self, o):
""" '==' on kind and "pattr" attributes.
@@ -84,7 +92,7 @@ class Token:
argstr = "%6d " % self.attr
else:
argstr = ' '*7
if self.pattr:
if self.has_arg:
pattr = self.pattr
if self.opc:
if self.op in self.opc.JREL_OPS:
@@ -95,6 +103,11 @@ class Token:
if not self.pattr.startswith('to '):
pattr = "to " + str(self.pattr)
pass
elif self.op in self.opc.CONST_OPS:
# Compare with pysource n_LOAD_CONST
attr = self.attr
if attr is None:
pattr = None
elif self.op in self.opc.hascompare:
if isinstance(self.attr, int):
pattr = self.opc.cmp_op[self.attr]
@@ -1,7 +1,25 @@
# Copyright (c) 2018 by Rocky Bernstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import sys
from uncompyle6.semantics.pysource import (
SourceWalker, SourceWalkerError, find_globals, ASSIGN_DOC_STRING, RETURN_NONE)

from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
from uncompyle6 import IS_PYPY

class AligningWalker(SourceWalker, object):
def __init__(self, version, out, scanner, showast=False,
debug_parser=PARSER_DEFAULT_DEBUG,
@@ -82,26 +100,45 @@ from uncompyle6.show import (
maybe_show_asm,
)

def align_deparse_code(version, co, out=sys.stderr, showasm=False, showast=False,
showgrammar=False, code_objects={}, compile_mode='exec', is_pypy=False):
#
DEFAULT_DEBUG_OPTS = {
'asm': False,
'tree': False,
'grammar': False
}

def code_deparse_align(co, out=sys.stderr, version=None, is_pypy=None,
debug_opts=DEFAULT_DEBUG_OPTS,
code_objects={}, compile_mode='exec'):
"""
ingests and deparses a given code block 'co'
"""

assert iscode(co)

if version is None:
version = float(sys.version[0:3])
if is_pypy is None:
is_pypy = IS_PYPY


# store final output stream for case of error
scanner = get_scanner(version, is_pypy=is_pypy)

tokens, customize = scanner.ingest(co, code_objects=code_objects)
maybe_show_asm(showasm, tokens)
show_asm = debug_opts.get('asm', None)
maybe_show_asm(show_asm, tokens)

debug_parser = dict(PARSER_DEFAULT_DEBUG)
if showgrammar:
debug_parser['reduce'] = showgrammar
show_grammar = debug_opts.get('grammar', None)
show_grammar = debug_opts.get('grammar', None)
if show_grammar:
debug_parser['reduce'] = show_grammar
debug_parser['errorstack'] = True

# Build a parse tree from tokenized and massaged disassembly.
deparsed = AligningWalker(version, scanner, out, showast=showast,
show_ast = debug_opts.get('ast', None)
deparsed = AligningWalker(version, scanner, out, showast=show_ast,
debug_parser=debug_parser, compile_mode=compile_mode,
is_pypy = is_pypy)
@@ -138,10 +175,7 @@ def align_deparse_code(version, co, out=sys.stderr, showasm=False, showast=False
if __name__ == '__main__':
def deparse_test(co):
"This is a docstring"
sys_version = sys.version_info.major + (sys.version_info.minor / 10.0)
# deparsed = deparse_code(sys_version, co, showasm=True, showast=True)
deparsed = align_deparse_code(sys_version, co, showasm=False, showast=False,
showgrammar=False)
deparsed = code_deparse_align(co)
print(deparsed.text)
return
deparse_test(deparse_test.__code__)
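
The entry point changes from the version-first align_deparse_code to the code-first code_deparse_align with a debug_opts dict. A minimal use mirroring the __main__ test above; the module path uncompyle6.semantics.aligner is an assumption, adjust it to wherever this file lives:

    import sys
    from uncompyle6.semantics.aligner import code_deparse_align  # assumed path

    def sample():
        return 5

    deparsed = code_deparse_align(sample.__code__, out=sys.stdout,
                                  debug_opts={'asm': False, 'tree': False,
                                              'grammar': False})
    print(deparsed.text)
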
@@ -526,30 +526,49 @@ def customize_for_version(self, is_pypy, version):
sep = INDENT_PER_LEVEL[:-1]
line_number = self.line_number

assert node[0].kind.startswith('kvlist')
# Python 3.5+ style key/value list in dict
kv_node = node[0]
l = list(kv_node)
i = 0
# Respect line breaks from source
while i < len(l):
self.write(sep)
name = self.traverse(l[i], indent='')
# Strip off beginning and trailing quotes in name
name = name[1:-1]
if i > 0:
line_number = self.indent_if_source_nl(line_number,
self.indent + INDENT_PER_LEVEL[:-1])
line_number = self.line_number
self.write(name, '=')
value = self.traverse(l[i+1], indent=self.indent+(len(name)+2)*' ')
self.write(value)
sep = ","
if line_number != self.line_number:
sep += "\n" + self.indent + INDENT_PER_LEVEL[:-1]
if node[0].kind.startswith('kvlist'):
# Python 3.5+ style key/value list in dict
kv_node = node[0]
l = list(kv_node)
i = 0
# Respect line breaks from source
while i < len(l):
self.write(sep)
name = self.traverse(l[i], indent='')
# Strip off beginning and trailing quotes in name
name = name[1:-1]
if i > 0:
line_number = self.indent_if_source_nl(line_number,
self.indent + INDENT_PER_LEVEL[:-1])
line_number = self.line_number
i += 2
pass
self.write(name, '=')
value = self.traverse(l[i+1], indent=self.indent+(len(name)+2)*' ')
self.write(value)
sep = ", "
if line_number != self.line_number:
sep += "\n" + self.indent + INDENT_PER_LEVEL[:-1]
line_number = self.line_number
i += 2
pass
elif node[-1].kind.startswith('BUILD_CONST_KEY_MAP'):
keys_node = node[-2]
keys = keys_node.attr
# from trepan.api import debug; debug()
assert keys_node == 'LOAD_CONST' and isinstance(keys, tuple)
for i in range(node[-1].attr):
self.write(sep)
self.write(keys[i], '=')
value = self.traverse(node[i], indent='')
self.write(value)
sep = ", "
if line_number != self.line_number:
sep += "\n" + self.indent + INDENT_PER_LEVEL[:-1]
line_number = self.line_number
pass
pass
else:
assert False, "Don't know how to untangle dictionary"

self.prec = p
self.indent_less(INDENT_PER_LEVEL)
return
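
The new BUILD_CONST_KEY_MAP branch above covers the 3.6 bytecode shape where all the keys arrive as one constant tuple. To see which form a given dict display compiles to on your interpreter (output is version dependent):

    import dis
    # On CPython 3.6+ this typically shows LOAD_CONST ('a', 'b') followed by
    # BUILD_CONST_KEY_MAP; older versions build the dict with BUILD_MAP /
    # STORE_MAP instead.
    dis.dis(compile("{'a': x, 'b': y}", '<example>', 'eval'))
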
@@ -592,6 +611,11 @@ def customize_for_version(self, is_pypy, version):
n = len(node)
assert n >= len(keys)+1, \
'not enough parameters keyword-tuple values'
# try:
# assert n >= len(keys)+1, \
# 'not enough parameters keyword-tuple values'
# except:
# from trepan.api import debug; debug()
sep = ''
# FIXME: adjust output for line breaks?
for i in range(num_posargs):
@@ -1,5 +1,18 @@
# Copyright (c) 2018 by Rocky Bernstein
from uncompyle6.semantics.pysource import SourceWalker, deparse_code
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from uncompyle6.semantics.pysource import SourceWalker, code_deparse
import uncompyle6.semantics.fragments as fragments

# FIXME: does this handle nested code, and lambda properly
@@ -46,42 +59,49 @@ class LineMapFragmentWalker(fragments.FragmentsWalker, LineMapWalker):
def deparse_code_with_map(*args, **kwargs):
"""
Like deparse_code but saves line number correspondences.
Deprecated. Use code_deparse_with_map
"""
kwargs['walker'] = LineMapWalker
return deparse_code(*args, **kwargs)
return code_deparse(*args, **kwargs)

def code_deparse_with_map(*args, **kwargs):
"""
Like code_deparse but saves line number correspondences.
"""
kwargs['walker'] = LineMapWalker
return code_deparse(*args, **kwargs)

def deparse_code_with_fragments_and_map(*args, **kwargs):
"""
Like deparse_code_with_map but saves fragments.
Deprecated. Use code_deparse_with_fragments_and_map
"""
kwargs['walker'] = LineMapFragmentWalker
return fragments.deparse_code(*args, **kwargs)

def code_deparse_with_fragments_and_map(*args, **kwargs):
"""
Like code_deparse_with_map but saves fragments.
"""
kwargs['walker'] = LineMapFragmentWalker
return fragments.code_deparse(*args, **kwargs)

if __name__ == '__main__':
def deparse_test(co):
"This is a docstring"
import sys
sys_version = float(sys.version[0:3])
# deparsed = deparse_code(sys_version, co, showasm=True, showast=True)
deparsed = deparse_code_with_map(sys_version, co, showasm=False,
showast=False,
showgrammar=False)
deparsed = code_deparse_with_map(co)
a = 1; b = 2
print("\n")
linemap = [(line_no, deparsed.source_linemap[line_no])
for line_no in
sorted(deparsed.source_linemap.keys())]
print(linemap)
deparsed = deparse_code_with_fragments_and_map(sys_version,
co, showasm=False,
showast=False,
showgrammar=False)
a = 1; b = 2
deparsed = code_deparse_with_fragments_and_map(co)
print("\n")
linemap2 = [(line_no, deparsed.source_linemap[line_no])
for line_no in
sorted(deparsed.source_linemap.keys())]
print(linemap2)
assert linemap == linemap2
# assert linemap == linemap2
return
deparse_test(deparse_test.__code__)
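
Mirroring the __main__ exercise above, the new keyword-free entry point takes just a code object and records decompiled-line to original-line correspondences in source_linemap. The module path uncompyle6.semantics.linemap is an assumption; adjust it to wherever LineMapWalker is defined:

    from uncompyle6.semantics.linemap import code_deparse_with_map  # assumed path

    def sample():
        a = 1
        b = a + 1
        return b

    deparsed = code_deparse_with_map(sample.__code__)
    print(sorted(deparsed.source_linemap.items()))
    # [(decompiled_line_no, original_line_no), ...]
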
@@ -1,5 +1,18 @@
# Copyright (c) 2015-2018 by Rocky Bernstein
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
All the crazy things we have to do to handle Python functions
"""
@@ -658,24 +671,27 @@ def make_function3(self, node, is_lambda, nested=1, codeNode=None):
# kwonlyargcount = co.co_kwonlyargcount

free_tup = annotate_dict = kw_dict = default_tup = None
index = 0
# FIXME: this is woefully wrong
if argc & 8:
fn_bits = node[-1].attr
index = -4 # Skip over:
# MAKE_FUNCTION,
# LOAD_CONST qualified name,
# LOAD_CONST code object
if fn_bits[-1]:
free_tup = node[index]
index += 1
if argc & 4:
kw_dict = node[1]
index += 1
if argc & 2:
kw_dict = node[index]
index += 1
if argc & 1:
index -= 1
if fn_bits[-2]:
annotate_dict = node[index]
index -= 1
if fn_bits[-3]:
kw_dict = node[index]
index -= 1
if fn_bits[-4]:
default_tup = node[index]

if kw_dict == 'expr':
kw_dict = kw_dict[0]

# FIXME: handle free_tup, annotatate_dict, and default_tup
# FIXME: handle free_tup, annotate_dict, and default_tup
if kw_dict:
assert kw_dict == 'dict'
defaults = [self.traverse(n, indent='') for n in kw_dict[:-2]]
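
For reference while reading the (still FIXME'd) bit tests above: on CPython 3.6 the MAKE_FUNCTION oparg is a bit mask describing what was pushed before the code object, which is what fn_bits and argc are being matched against. A small decoder over those documented bits:

    MAKE_FUNCTION_BITS = {
        0x01: 'tuple of positional default values',
        0x02: 'dict of keyword-only default values',
        0x04: 'annotations dict',
        0x08: 'tuple of free-variable cells (closure)',
    }

    def describe_make_function(argc):
        return [name for bit, name in sorted(MAKE_FUNCTION_BITS.items())
                if argc & bit]

    print(describe_make_function(0x09))
    # ['tuple of positional default values', 'tuple of free-variable cells (closure)']
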
@@ -1,3 +1,17 @@
# Copyright (c) 2018 by Rocky Bernstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import uncompyle6.parser as python_parser
class ParserError(python_parser.ParserError):
def __init__(self, error, tokens):
@@ -1079,6 +1079,7 @@ class SourceWalker(GenericASTTraversal, object):
self.write(')')

def n_LOAD_CONST(self, node):
attr = node.attr
data = node.pattr; datatype = type(data)
if isinstance(data, float) and str(data) in frozenset(['nan', '-nan', 'inf', '-inf']):
# float values 'nan' and 'inf' are not directly representable in Python at least
@@ -1093,13 +1094,15 @@ class SourceWalker(GenericASTTraversal, object):
self.write( hex(data) )
elif datatype is type(Ellipsis):
self.write('...')
elif data is None:
elif attr is None:
# LOAD_CONST 'None' only occurs, when None is
# implicit eg. in 'return' w/o params
# pass
self.write('None')
elif isinstance(data, tuple):
self.pp_tuple(data)
elif isinstance(attr, bool):
self.write(repr(attr))
elif self.FUTURE_UNICODE_LITERALS:
# The FUTURE_UNICODE_LITERALS compiler flag
# in 2.6 on change the way
@@ -1290,8 +1293,17 @@ class SourceWalker(GenericASTTraversal, object):

def n_import_from(self, node):
relative_path_index = 0
if self.version >= 2.5 and node[relative_path_index].pattr > 0:
node[2].pattr = '.'*node[relative_path_index].pattr + node[2].pattr
if self.version >= 2.5:
if node[relative_path_index].attr > 0:
node[2].pattr = ('.' * node[relative_path_index].pattr) + node[2].pattr
if self.version > 2.7:
if isinstance(node[1].pattr, tuple):
imports = node[1].pattr
for pattr in imports:
node[1].pattr = pattr
self.default(node)
return
pass
self.default(node)

n_import_from_star = n_import_from
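
The rewritten n_import_from reads the relative-import level from the first LOAD_CONST's attr and prepends that many dots to the module name. A stand-alone sketch of just that rendering step:

    def render_import_from(level, module, names):
        # level 0 -> absolute import, level N -> N leading dots
        return 'from %s%s import %s' % ('.' * level, module, ', '.join(names))

    assert render_import_from(0, 'os.path', ['join']) == 'from os.path import join'
    assert render_import_from(2, 'pkg', ['mod']) == 'from ..pkg import mod'
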
@@ -1532,10 +1544,9 @@ class SourceWalker(GenericASTTraversal, object):
self.prune()

def comprehension_walk3(self, node, iter_index, code_index=-5):
"""
List comprehensions the way they are done in Python3.
They are other comprehensions, e.g. set comprehensions
See if we can combine code.
"""Non-closure-based comprehensions the way they are done in Python3.
They are other comprehensions, e.g. set comprehensions See if
we can combine code.
"""
p = self.prec
self.prec = 27
@@ -1600,7 +1611,7 @@ class SourceWalker(GenericASTTraversal, object):
assert n.kind in ('lc_body', 'comp_body', 'setcomp_func', 'set_comp_body'), ast
assert store, "Couldn't find store in list/set comprehension"

# Issue created with later Python code generation is that there
# A problem created with later Python code generation is that there
# is a lambda set up with a dummy argument name that is then called
# So we can't just translate that as is but need to replace the
# dummy name. Below we are picking out the variable name as seen
@@ -1641,8 +1652,8 @@ class SourceWalker(GenericASTTraversal, object):
self.prec = p

def listcomprehension_walk2(self, node):
"""List comprehensions the way they are done in Python 2 (and
some Python 3?).
"""List comprehensions the way they are done in Python 2 and
sometimes in Python 3.
They're more other comprehensions, e.g. set comprehensions
See if we can combine code.
"""
@@ -1652,45 +1663,72 @@ class SourceWalker(GenericASTTraversal, object):
code = Code(node[1].attr, self.scanner, self.currentclass)
ast = self.build_ast(code._tokens, code._customize)
self.customize(code._customize)
if node == 'set_comp':
ast = ast[0][0][0]
else:
ast = ast[0][0][0][0][0]

if ast == 'expr':
# skip over: sstmt, stmt, return, ret_expr
# and other singleton derivations
while (len(ast) == 1
or (ast in ('sstmt', 'return')
and ast[-1] in ('RETURN_LAST', 'RETURN_VALUE'))):
ast = ast[0]

n = ast[1]
collection = node[-3]
list_if = None
# collection = node[-3]
collections = [node[-3]]
list_ifs = []
assert n == 'list_iter'

stores = []

# Find the list comprehension body. It is the inner-most
# node that is not list_.. .
while n == 'list_iter':
n = n[0] # recurse one step
if n == 'list_for':
store = n[2]
stores.append(n[2])
n = n[3]
if self.version >= 3.6 and n[0] == 'list_for':
# Dog-paddle down largely singleton reductions
# to find the collection (expr)
c = n[0][0]
if c == 'expr':
c = c[0]
# FIXME: grammar is wonky here? Is this really an attribute?
if c == 'attribute':
c = c[0]
collections.append(c)
pass
elif n in ('list_if', 'list_if_not'):
# FIXME: just a guess
if n[0].kind == 'expr':
list_if = n
list_ifs.append(n)
else:
list_if = n[1]
list_ifs.append([1])
n = n[2]
pass
pass

assert n == 'lc_body', ast

# FIXME: add indentation around "for"'s and "in"'s
self.preorder(n[0])
self.write(' for ')
self.preorder(store)
self.write(' in ')
self.preorder(collection)
if list_if:
self.preorder(list_if)
if self.version < 3.6:
self.write(' for ')
self.preorder(stores[0])
self.write(' in ')
self.preorder(collections[0])
if list_ifs:
self.preorder(list_ifs[0])
pass
else:
for i, store in enumerate(stores):
self.write(' for ')
self.preorder(store)
self.write(' in ')
self.preorder(collections[i])
if i < len(list_ifs):
self.preorder(list_ifs[i])
pass
pass
self.prec = p

def n_listcomp(self, node):
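
The 3.6 path above walks parallel lists of stores, collections, and optional ifs and emits one "for ... in ..." clause per entry. A sketch of that emission with plain strings standing in for the tree nodes the walker actually traverses:

    def emit_comprehension(body, stores, collections, list_ifs):
        parts = [body]
        for i, store in enumerate(stores):
            parts.append(' for %s in %s' % (store, collections[i]))
            if i < len(list_ifs):
                parts.append(' if %s' % list_ifs[i])
        return '[' + ''.join(parts) + ']'

    print(emit_comprehension('f(x)', ['x', 'y'], ['xs', 'ys'], ['x > 0']))
    # [f(x) for x in xs if x > 0 for y in ys]
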
@@ -1705,7 +1743,7 @@ class SourceWalker(GenericASTTraversal, object):
n_dict_comp = n_set_comp

def setcomprehension_walk3(self, node, collection_index):
"""List comprehensions the way they are done in Python3.
"""Set comprehensions the way they are done in Python3.
They're more other comprehensions, e.g. set comprehensions
See if we can combine code.
"""
@@ -2676,7 +2714,7 @@ def code_deparse(co, out=sys.stdout, version=None, debug_opts=DEFAULT_DEBUG_OPTS

# Build Syntax Tree from disassembly.
linestarts = dict(scanner.opc.findlinestarts(co))
deparsed = walker(version, out, scanner, showast=debug_opts['ast'],
deparsed = walker(version, out, scanner, showast=debug_opts.get('ast', None),
debug_parser=debug_parser, compile_mode=compile_mode,
is_pypy=is_pypy, linestarts=linestarts)