This commit is contained in:
rocky
2022-03-04 05:07:31 -05:00
parent 05f743ed14
commit 2efe2b5b47
17 changed files with 316 additions and 165 deletions

View File

@@ -1,5 +1,5 @@
# Python 3.6, uses rule: # Python 3.6, uses rule:
# genexpr ::= load_closure load_genexpr LOAD_CONST # genexpr ::= load_closure load_genexpr LOAD_CONST
# MAKE_FUNCTION_8 expr GET_ITER CALL_FUNCTION_1 # MAKE_FUNCTION_CLOSURE expr GET_ITER CALL_FUNCTION_1
def __sub__(self, other): # SList()-other def __sub__(self, other): # SList()-other
return self.__class__(i for i in self if i not in other) return self.__class__(i for i in self if i not in other)

View File

@@ -4,8 +4,8 @@ def __init__(self, msg = None, digestmod = None):
self.digest_cons = lambda d='': digestmod.new(d) self.digest_cons = lambda d='': digestmod.new(d)
# From Python 3.6 functools.py # From Python 3.6 functools.py
# Bug was handling lambda for MAKE_FUNCTION_8 (closure) # Bug was handling lambda for MAKE_FUNCTION_CLOSURE (closure)
# vs to MAKE_FUNCTION_9 (pos_args + closure) # vs to MAKE_FUNCTION_CLOSURE_POS (pos_args + closure)
def bug(): def bug():
def register(cls, func=None): def register(cls, func=None):
return lambda f: register(cls, f) return lambda f: register(cls, f)

View File

@@ -133,7 +133,8 @@ def main_bin():
elif opt in ('--tree+', '-T'): elif opt in ('--tree+', '-T'):
if 'showast' not in options: if 'showast' not in options:
options['showast'] = {} options['showast'] = {}
options['showast']['Full'] = True options['showast']['after'] = True
options['showast']['before'] = True
options['do_verify'] = None options['do_verify'] = None
elif opt in ('--grammar', '-g'): elif opt in ('--grammar', '-g'):
options['showgrammar'] = True options['showgrammar'] = True

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2018-2021 Rocky Bernstein <rocky@gnu.org> # Copyright (C) 2018-2022 Rocky Bernstein <rocky@gnu.org>
# #
# This program is free software: you can redistribute it and/or modify # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by # it under the terms of the GNU General Public License as published by
@@ -19,6 +19,7 @@ from xdis import iscode
from xdis.version_info import IS_PYPY, PYTHON_VERSION_TRIPLE, version_tuple_to_str from xdis.version_info import IS_PYPY, PYTHON_VERSION_TRIPLE, version_tuple_to_str
from uncompyle6.disas import check_object_path from uncompyle6.disas import check_object_path
from uncompyle6.semantics import pysource from uncompyle6.semantics import pysource
from uncompyle6.semantics.pysource import PARSER_DEFAULT_DEBUG
from uncompyle6.parser import ParserError from uncompyle6.parser import ParserError
from uncompyle6.version import __version__ from uncompyle6.version import __version__
@@ -42,9 +43,9 @@ def _get_outstream(outfile):
return open(outfile, 'wb') return open(outfile, 'wb')
def decompile( def decompile(
bytecode_version,
co, co,
out=None, bytecode_version = PYTHON_VERSION_TRIPLE,
out=sys.stdout,
showasm=None, showasm=None,
showast={}, showast={},
timestamp=None, timestamp=None,
@@ -98,7 +99,7 @@ def decompile(
write("# -*- coding: %s -*-" % source_encoding) write("# -*- coding: %s -*-" % source_encoding)
write( write(
"# uncompyle6 version %s\n" "# uncompyle6 version %s\n"
"# %sPython bytecode %s%s\n# Decompiled from: %sPython %s" "# %sPython bytecode version base %s%s\n# Decompiled from: %sPython %s"
% ( % (
__version__, __version__,
co_pypy_str, co_pypy_str,
@@ -107,10 +108,6 @@ def decompile(
"\n# ".join(sys_version_lines), "\n# ".join(sys_version_lines),
) )
) )
if bytecode_version >= 3.0:
write(
"# Warning: this version of Python has problems handling the Python 3 byte type in constants properly.\n"
)
if co.co_filename: if co.co_filename:
write("# Embedded file name: %s" % co.co_filename) write("# Embedded file name: %s" % co.co_filename)
if timestamp: if timestamp:
@@ -120,7 +117,17 @@ def decompile(
real_out.write("# Size of source mod 2**32: %d bytes\n" % real_out.write("# Size of source mod 2**32: %d bytes\n" %
source_size) source_size)
debug_opts = {"asm": showasm, "ast": showast, "grammar": showgrammar} # maybe a second -a will do before as well
if showasm:
asm = "after"
else:
asm = None
grammar = dict(PARSER_DEFAULT_DEBUG)
if showgrammar:
grammar["reduce"] = True
debug_opts = {"asm": asm, "tree": showast, "grammar": grammar}
try: try:
if mapstream: if mapstream:
@@ -128,10 +135,12 @@ def decompile(
mapstream = _get_outstream(mapstream) mapstream = _get_outstream(mapstream)
deparsed = deparse_code_with_map( deparsed = deparse_code_with_map(
bytecode_version,
co, co,
out, out,
bytecode_version, showasm,
debug_opts, showast,
showgrammar,
code_objects=code_objects, code_objects=code_objects,
is_pypy=is_pypy, is_pypy=is_pypy,
compile_mode=compile_mode, compile_mode=compile_mode,
@@ -182,7 +191,7 @@ def decompile_file(
filename, filename,
outstream=None, outstream=None,
showasm=None, showasm=None,
showast=False, showast={},
showgrammar=False, showgrammar=False,
source_encoding=None, source_encoding=None,
mapstream=None, mapstream=None,
@@ -201,11 +210,11 @@ def decompile_file(
if isinstance(co, list): if isinstance(co, list):
deparsed = [] deparsed = []
for con in co: for bytecode in co:
deparsed.append( deparsed.append(
decompile( decompile(
bytecode,
version, version,
con,
outstream, outstream,
showasm, showasm,
showast, showast,
@@ -215,14 +224,14 @@ def decompile_file(
code_objects=code_objects, code_objects=code_objects,
is_pypy=is_pypy, is_pypy=is_pypy,
magic_int=magic_int, magic_int=magic_int,
mapstream=mapstream,
), ),
mapstream=mapstream,
) )
else: else:
deparsed = [ deparsed = [
decompile( decompile(
version,
co, co,
version,
outstream, outstream,
showasm, showasm,
showast, showast,
@@ -235,6 +244,7 @@ def decompile_file(
magic_int=magic_int, magic_int=magic_int,
mapstream=mapstream, mapstream=mapstream,
do_fragments=do_fragments, do_fragments=do_fragments,
compile_mode="exec",
) )
] ]
co = None co = None
@@ -249,7 +259,7 @@ def main(
source_files, source_files,
outfile=None, outfile=None,
showasm=None, showasm=None,
showast=False, showast={},
do_verify=False, do_verify=False,
showgrammar=False, showgrammar=False,
source_encoding=None, source_encoding=None,

View File

@@ -219,19 +219,19 @@ class Python36Parser(Python35Parser):
formatted_value2 ::= expr expr FORMAT_VALUE_ATTR formatted_value2 ::= expr expr FORMAT_VALUE_ATTR
""" """
self.add_unique_doc_rules(rules_str, customize) self.add_unique_doc_rules(rules_str, customize)
elif opname == 'MAKE_FUNCTION_8': elif opname == 'MAKE_FUNCTION_CLOSURE':
if 'LOAD_DICTCOMP' in self.seen_ops: if 'LOAD_DICTCOMP' in self.seen_ops:
# Is there something general going on here? # Is there something general going on here?
rule = """ rule = """
dict_comp ::= load_closure LOAD_DICTCOMP LOAD_STR dict_comp ::= load_closure LOAD_DICTCOMP LOAD_STR
MAKE_FUNCTION_8 expr MAKE_FUNCTION_CLOSURE expr
GET_ITER CALL_FUNCTION_1 GET_ITER CALL_FUNCTION_1
""" """
self.addRule(rule, nop_func) self.addRule(rule, nop_func)
elif 'LOAD_SETCOMP' in self.seen_ops: elif 'LOAD_SETCOMP' in self.seen_ops:
rule = """ rule = """
set_comp ::= load_closure LOAD_SETCOMP LOAD_STR set_comp ::= load_closure LOAD_SETCOMP LOAD_STR
MAKE_FUNCTION_8 expr MAKE_FUNCTION_CLOSURE expr
GET_ITER CALL_FUNCTION_1 GET_ITER CALL_FUNCTION_1
""" """
self.addRule(rule, nop_func) self.addRule(rule, nop_func)

View File

@@ -1204,19 +1204,19 @@ class Python37Parser(Python37BaseParser):
formatted_value2 ::= expr expr FORMAT_VALUE_ATTR formatted_value2 ::= expr expr FORMAT_VALUE_ATTR
""" """
self.add_unique_doc_rules(rules_str, customize) self.add_unique_doc_rules(rules_str, customize)
elif opname == "MAKE_FUNCTION_8": elif opname == "MAKE_FUNCTION_CLOSURE":
if "LOAD_DICTCOMP" in self.seen_ops: if "LOAD_DICTCOMP" in self.seen_ops:
# Is there something general going on here? # Is there something general going on here?
rule = """ rule = """
dict_comp ::= load_closure LOAD_DICTCOMP LOAD_STR dict_comp ::= load_closure LOAD_DICTCOMP LOAD_STR
MAKE_FUNCTION_8 expr MAKE_FUNCTION_CLOSURE expr
GET_ITER CALL_FUNCTION_1 GET_ITER CALL_FUNCTION_1
""" """
self.addRule(rule, nop_func) self.addRule(rule, nop_func)
elif "LOAD_SETCOMP" in self.seen_ops: elif "LOAD_SETCOMP" in self.seen_ops:
rule = """ rule = """
set_comp ::= load_closure LOAD_SETCOMP LOAD_STR set_comp ::= load_closure LOAD_SETCOMP LOAD_STR
MAKE_FUNCTION_8 expr MAKE_FUNCTION_CLOSURE expr
GET_ITER CALL_FUNCTION_1 GET_ITER CALL_FUNCTION_1
""" """
self.addRule(rule, nop_func) self.addRule(rule, nop_func)

View File

@@ -584,6 +584,21 @@ class Python37BaseParser(PythonParser):
""" """
self.add_unique_doc_rules(rules_str, customize) self.add_unique_doc_rules(rules_str, customize)
elif opname == "GET_ANEXT":
self.addRule(
"""
func_async_prefix ::= _come_froms SETUP_FINALLY GET_ANEXT LOAD_CONST YIELD_FROM POP_BLOCK
func_async_middle ::= JUMP_FORWARD COME_FROM_EXCEPT
DUP_TOP LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_TRUE
list_afor2 ::= func_async_prefix
store list_iter
JUMP_BACK COME_FROM_FINALLY
END_ASYNC_FOR
""",
nop_func,
)
custom_ops_processed.add(opname)
elif opname == "FORMAT_VALUE_ATTR": elif opname == "FORMAT_VALUE_ATTR":
rules_str = """ rules_str = """
expr ::= formatted_value2 expr ::= formatted_value2
@@ -932,19 +947,19 @@ class Python37BaseParser(PythonParser):
) )
self.add_unique_rule(rule, opname, token.attr, customize) self.add_unique_rule(rule, opname, token.attr, customize)
elif opname == "MAKE_FUNCTION_8": elif opname == "MAKE_FUNCTION_CLOSURE":
if "LOAD_DICTCOMP" in self.seen_ops: if "LOAD_DICTCOMP" in self.seen_ops:
# Is there something general going on here? # Is there something general going on here?
rule = """ rule = """
dict_comp ::= load_closure LOAD_DICTCOMP LOAD_STR dict_comp ::= load_closure LOAD_DICTCOMP LOAD_STR
MAKE_FUNCTION_8 expr MAKE_FUNCTION_CLOSURE expr
GET_ITER CALL_FUNCTION_1 GET_ITER CALL_FUNCTION_1
""" """
self.addRule(rule, nop_func) self.addRule(rule, nop_func)
elif "LOAD_SETCOMP" in self.seen_ops: elif "LOAD_SETCOMP" in self.seen_ops:
rule = """ rule = """
set_comp ::= load_closure LOAD_SETCOMP LOAD_STR set_comp ::= load_closure LOAD_SETCOMP LOAD_STR
MAKE_FUNCTION_8 expr MAKE_FUNCTION_CLOSURE expr
GET_ITER CALL_FUNCTION_1 GET_ITER CALL_FUNCTION_1
""" """
self.addRule(rule, nop_func) self.addRule(rule, nop_func)

View File

@@ -74,6 +74,11 @@ class Python38Parser(Python37Parser):
COME_FROM_FINALLY COME_FROM_FINALLY
END_ASYNC_FOR END_ASYNC_FOR
genexpr_func_async ::= LOAD_FAST func_async_prefix
store comp_iter
JUMP_BACK COME_FROM_FINALLY
END_ASYNC_FOR
# FIXME: come froms after the else_suite or END_ASYNC_FOR distinguish which of # FIXME: come froms after the else_suite or END_ASYNC_FOR distinguish which of
# for / forelse is used. Add come froms and check of add up control-flow detection phase. # for / forelse is used. Add come froms and check of add up control-flow detection phase.
async_forelse_stmt38 ::= expr async_forelse_stmt38 ::= expr

View File

@@ -397,7 +397,13 @@ class Scanner3(Scanner):
if self.version >= (3, 6): if self.version >= (3, 6):
# 3.6+ doesn't have MAKE_CLOSURE, so opname == 'MAKE_FUNCTION' # 3.6+ doesn't have MAKE_CLOSURE, so opname == 'MAKE_FUNCTION'
flags = argval flags = argval
opname = "MAKE_FUNCTION_%d" % (flags) # FIXME: generalize this
if flags == 8:
opname = "MAKE_FUNCTION_CLOSURE"
elif flags == 9:
opname = "MAKE_FUNCTION_CLOSURE_POS"
else:
opname = "MAKE_FUNCTION_%d" % (flags)
attr = [] attr = []
for flag in self.MAKE_FUNCTION_FLAGS: for flag in self.MAKE_FUNCTION_FLAGS:
bit = flags & 1 bit = flags & 1

View File

@@ -1,4 +1,4 @@
# Copyright (c) 2018 by Rocky Bernstein # Copyright (c) 2018, 2022 by Rocky Bernstein
# #
# This program is free software: you can redistribute it and/or modify # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by # it under the terms of the GNU General Public License as published by
@@ -141,8 +141,8 @@ def code_deparse_align(co, out=sys.stderr, version=None, is_pypy=None,
debug_parser=debug_parser, compile_mode=compile_mode, debug_parser=debug_parser, compile_mode=compile_mode,
is_pypy = is_pypy) is_pypy = is_pypy)
isTopLevel = co.co_name == '<module>' is_top_level_module = co.co_name == '<module>'
deparsed.ast = deparsed.build_ast(tokens, customize, co, isTopLevel=isTopLevel) deparsed.ast = deparsed.build_ast(tokens, customize, co, is_top_level_module=is_top_level_module)
assert deparsed.ast == 'stmts', 'Should have parsed grammar start' assert deparsed.ast == 'stmts', 'Should have parsed grammar start'

View File

@@ -1,4 +1,4 @@
# Copyright (c) 2017-2021 by Rocky Bernstein # Copyright (c) 2017-2022 by Rocky Bernstein
# #
# This program is free software: you can redistribute it and/or modify # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by # it under the terms of the GNU General Public License as published by
@@ -44,6 +44,9 @@ maxint = sys.maxint
# say to 100, to make sure we avoid additional prenthesis in # say to 100, to make sure we avoid additional prenthesis in
# call((.. op ..)). # call((.. op ..)).
NO_PARENTHESIS_EVER = 100
# fmt: off
PRECEDENCE = { PRECEDENCE = {
"named_expr": 40, # := "named_expr": 40, # :=
"yield": 38, # Needs to be below named_expr "yield": 38, # Needs to be below named_expr
@@ -168,11 +171,14 @@ TABLE_R = {
"DELETE_ATTR": ("%|del %c.%[-1]{pattr}\n", 0), "DELETE_ATTR": ("%|del %c.%[-1]{pattr}\n", 0),
} }
TABLE_R0 = { # I'll leave this in for historical interest.
# "BUILD_LIST": ( "[%C]", (0,-1,", ") ), # TABLE_R0 it was like TABLE_R but the key was the *child* of the last child,
# "BUILD_TUPLE": ( "(%C)", (0,-1,", ") ), # or a grandchild of the node that this is considered.
# "CALL_FUNCTION": ( "%c(%P)", 0, (1,-1,", ") ), # TABLE_R0 = {
} # "BUILD_LIST": ( "[%C]", (0,-1,", ") ),
# "BUILD_TUPLE": ( "(%C)", (0,-1,", ") ),
# "CALL_FUNCTION": ( "%c(%P)", 0, (1,-1,", ") ),
# }
TABLE_DIRECT = { TABLE_DIRECT = {
"BINARY_ADD": ("+",), "BINARY_ADD": ("+",),
@@ -236,8 +242,19 @@ TABLE_DIRECT = {
(0, "expr", PRECEDENCE["subscript"]), (0, "expr", PRECEDENCE["subscript"]),
(1, "expr"), (1, "expr"),
), ),
"subscript": ("%p[%c]", (0, "expr", PRECEDENCE["subscript"]), (1, "expr")),
"subscript2": ("%p[%c]", (0, "expr", PRECEDENCE["subscript"]), (1, "expr")), "subscript": (
"%p[%p]",
(0, "expr", PRECEDENCE["subscript"]),
(1, "expr", NO_PARENTHESIS_EVER)
),
"subscript2": (
"%p[%p]",
(0, "expr", PRECEDENCE["subscript"]),
(1, "expr", NO_PARENTHESIS_EVER)
),
"store_subscript": ("%p[%c]", (0, "expr", PRECEDENCE["subscript"]), (1, "expr")), "store_subscript": ("%p[%c]", (0, "expr", PRECEDENCE["subscript"]), (1, "expr")),
"STORE_FAST": ("%{pattr}",), "STORE_FAST": ("%{pattr}",),
"STORE_NAME": ("%{pattr}",), "STORE_NAME": ("%{pattr}",),
@@ -427,7 +444,6 @@ TABLE_DIRECT = {
MAP_DIRECT = (TABLE_DIRECT,) MAP_DIRECT = (TABLE_DIRECT,)
MAP_R0 = (TABLE_R0, -1, 0)
MAP_R = (TABLE_R, -1) MAP_R = (TABLE_R, -1)
MAP = { MAP = {
@@ -435,7 +451,6 @@ MAP = {
"call": MAP_R, "call": MAP_R,
"delete": MAP_R, "delete": MAP_R,
"store": MAP_R, "store": MAP_R,
"exprlist": MAP_R0,
} }
ASSIGN_TUPLE_PARAM = lambda param_name: SyntaxTree( ASSIGN_TUPLE_PARAM = lambda param_name: SyntaxTree(

View File

@@ -154,6 +154,7 @@ def customize_for_version3(self, version):
# recurse one step # recurse one step
n = n[0] n = n[0]
# FIXME: adjust for set comprehension
if n == "list_for": if n == "list_for":
stores.append(n[2]) stores.append(n[2])
n = n[3] n = n[3]
@@ -168,13 +169,12 @@ def customize_for_version3(self, version):
c = c[0] c = c[0]
collections.append(c) collections.append(c)
pass pass
elif n in ("list_if", "list_if_not"): elif n in ("list_if", "list_if_not", "list_if_or_not"):
# FIXME: just a guess
if n[0].kind == "expr": if n[0].kind == "expr":
list_ifs.append(n) list_ifs.append(n)
else: else:
list_ifs.append([1]) list_ifs.append([1])
n = n[2] n = n[-2] if n[-1] == "come_from_opt" else n[-1]
pass pass
elif n == "list_if37": elif n == "list_if37":
list_ifs.append(n) list_ifs.append(n)
@@ -184,7 +184,7 @@ def customize_for_version3(self, version):
collections.append(n[0][0]) collections.append(n[0][0])
n = n[1] n = n[1]
stores.append(n[1][0]) stores.append(n[1][0])
n = n[3] n = n[2] if n[2].kind == "list_iter" else n[3]
pass pass
assert n == "lc_body", ast assert n == "lc_body", ast

View File

@@ -338,7 +338,7 @@ def customize_for_version36(self, version):
kwargs = kwargs[0] kwargs = kwargs[0]
call_function_ex = node[-1] call_function_ex = node[-1]
assert call_function_ex == "CALL_FUNCTION_EX_KW" or ( assert call_function_ex == "CALL_FUNCTION_EX_KW" or (
self.version >= 3.6 and call_function_ex == "CALL_FUNCTION_EX" self.version >= (3, 6) and call_function_ex == "CALL_FUNCTION_EX"
) )
# FIXME: decide if the below test be on kwargs == 'dict' # FIXME: decide if the below test be on kwargs == 'dict'
if ( if (

View File

@@ -1,4 +1,4 @@
# Copyright (c) 2015-2019, 2021 by Rocky Bernstein # Copyright (c) 2015-2019, 2021-2022 by Rocky Bernstein
# #
# This program is free software: you can redistribute it and/or modify # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by # it under the terms of the GNU General Public License as published by
@@ -64,6 +64,7 @@ The node position 0 will be associated with "import".
# FIXME: DRY code with pysource # FIXME: DRY code with pysource
import re import re
from StringIO import StringIO
from uncompyle6.semantics import pysource from uncompyle6.semantics import pysource
from uncompyle6 import parser from uncompyle6 import parser
@@ -75,7 +76,7 @@ from uncompyle6.show import maybe_show_asm, maybe_show_tree
from uncompyle6.parsers.treenode import SyntaxTree from uncompyle6.parsers.treenode import SyntaxTree
from uncompyle6.semantics.pysource import ParserError, StringIO from uncompyle6.semantics.pysource import ParserError
from xdis import iscode from xdis import iscode
from xdis.version_info import IS_PYPY, PYTHON_VERSION_TRIPLE from xdis.version_info import IS_PYPY, PYTHON_VERSION_TRIPLE
@@ -628,32 +629,6 @@ class FragmentsWalker(pysource.SourceWalker, object):
self.indent_less() self.indent_less()
self.prune() # stop recursing self.prune() # stop recursing
def n_list_comp(self, node):
"""List comprehensions"""
p = self.prec
self.prec = 27
n = node[-1]
assert n == "list_iter"
# find innermost node
while n == "list_iter":
n = n[0] # recurse one step
if n == "list_for":
n = n[3]
elif n == "list_if":
n = n[2]
elif n == "list_if_not":
n = n[2]
assert n == "lc_body"
if node[0].kind.startswith("BUILD_LIST"):
start = len(self.f.getvalue())
self.set_pos_info(node[0], start, start + 1)
self.write("[ ")
self.preorder(n[0]) # lc_body
self.preorder(node[-1]) # for/if parts
self.write(" ]")
self.prec = p
self.prune() # stop recursing
def comprehension_walk(self, node, iter_index, code_index=-5): def comprehension_walk(self, node, iter_index, code_index=-5):
p = self.prec p = self.prec
self.prec = 27 self.prec = 27
@@ -946,7 +921,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
self.set_pos_info(node[0], start - 1, start) self.set_pos_info(node[0], start - 1, start)
self.comprehension_walk3(node, 1, 0) self.comprehension_walk3(node, 1, 0)
elif node[0].kind == "load_closure": elif node[0].kind == "load_closure":
self.setcomprehension_walk3(node, collection_index=4) self.closure_walk(node, collection_index=4)
else: else:
self.comprehension_walk(node, iter_index=4) self.comprehension_walk(node, iter_index=4)
self.write("}") self.write("}")
@@ -1011,7 +986,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
): ):
self.set_pos_info(node[1], node[0][0].start, node[0][0].finish) self.set_pos_info(node[1], node[0][0].start, node[0][0].finish)
def setcomprehension_walk3(self, node, collection_index): def closure_walk(self, node, collection_index):
"""Set comprehensions the way they are done in Python3. """Set comprehensions the way they are done in Python3.
They're more like other comprehensions, e.g. set comprehensions They're more like other comprehensions, e.g. set comprehensions
See if we can combine code. See if we can combine code.
@@ -1185,7 +1160,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
code, code,
is_lambda=False, is_lambda=False,
noneInNames=False, noneInNames=False,
isTopLevel=False, is_top_level_module=False,
): ):
# FIXME: DRY with pysource.py # FIXME: DRY with pysource.py
@@ -1227,7 +1202,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
# Python 3.4's classes can add a "return None" which is # Python 3.4's classes can add a "return None" which is
# invalid syntax. # invalid syntax.
if tokens[-2].kind == "LOAD_CONST": if tokens[-2].kind == "LOAD_CONST":
if isTopLevel or tokens[-2].pattr is None: if is_top_level_module or tokens[-2].pattr is None:
del tokens[-2:] del tokens[-2:]
else: else:
tokens.append(Token("RETURN_LAST")) tokens.append(Token("RETURN_LAST"))
@@ -2102,8 +2077,8 @@ def code_deparse(
is_pypy=is_pypy, is_pypy=is_pypy,
) )
isTopLevel = co.co_name == "<module>" is_top_level_module = co.co_name == "<module>"
deparsed.ast = deparsed.build_ast(tokens, customize, co, isTopLevel=isTopLevel) deparsed.ast = deparsed.build_ast(tokens, customize, co, is_top_level_module=is_top_level_module)
assert deparsed.ast == "stmts", "Should have parsed grammar start" assert deparsed.ast == "stmts", "Should have parsed grammar start"

View File

@@ -43,7 +43,7 @@ Python.
# describe rules and not have to create methods at all. # describe rules and not have to create methods at all.
# #
# So another other way to specify a semantic rule for a nonterminal is via # So another other way to specify a semantic rule for a nonterminal is via
# one of the tables MAP_R0, MAP_R, or MAP_DIRECT where the key is the # either tables MAP_R, or MAP_DIRECT where the key is the
# nonterminal name. # nonterminal name.
# #
# These dictionaries use a printf-like syntax to direct substitution # These dictionaries use a printf-like syntax to direct substitution
@@ -63,15 +63,14 @@ Python.
# parse tree for N. # parse tree for N.
# #
# #
# N&K N N # N&K N
# / | ... \ / | ... \ / | ... \ # / | ... \ / | ... \
# O O O O O K O O O # O O O O O K
# | #
# K #
# TABLE_DIRECT TABLE_R TABLE_R0 # TABLE_DIRECT TABLE_R
# #
# The default table is TABLE_DIRECT mapping. By far, most rules used work this way. # The default table is TABLE_DIRECT mapping. By far, most rules used work this way.
# TABLE_R0 is rarely used.
# #
# The key K is then extracted from the subtree and used to find one # The key K is then extracted from the subtree and used to find one
# of the tables, T listed above. The result after applying T[K] is # of the tables, T listed above. The result after applying T[K] is
@@ -139,7 +138,7 @@ from xdis.version_info import PYTHON_VERSION_TRIPLE
from uncompyle6.parser import get_python_parser from uncompyle6.parser import get_python_parser
from uncompyle6.parsers.treenode import SyntaxTree from uncompyle6.parsers.treenode import SyntaxTree
from spark_parser import GenericASTTraversal, DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG from spark_parser import GenericASTTraversal
from uncompyle6.scanner import Code, get_scanner from uncompyle6.scanner import Code, get_scanner
import uncompyle6.parser as python_parser import uncompyle6.parser as python_parser
from uncompyle6.semantics.check_ast import checker from uncompyle6.semantics.check_ast import checker
@@ -185,6 +184,25 @@ from StringIO import StringIO
DEFAULT_DEBUG_OPTS = {"asm": False, "tree": False, "grammar": False} DEFAULT_DEBUG_OPTS = {"asm": False, "tree": False, "grammar": False}
def unicode(x): return x
from StringIO import StringIO
PARSER_DEFAULT_DEBUG = {
"rules": False,
"transition": False,
"reduce": False,
"errorstack": "full",
"context": True,
"dups": False,
}
TREE_DEFAULT_DEBUG = {"before": False, "after": False}
DEFAULT_DEBUG_OPTS = {
"asm": False,
"tree": TREE_DEFAULT_DEBUG,
"grammar": dict(PARSER_DEFAULT_DEBUG),
}
class SourceWalkerError(Exception): class SourceWalkerError(Exception):
def __init__(self, errmsg): def __init__(self, errmsg):
@@ -202,7 +220,7 @@ class SourceWalker(GenericASTTraversal, object):
version, version,
out, out,
scanner, scanner,
showast=False, showast=TREE_DEFAULT_DEBUG,
debug_parser=PARSER_DEFAULT_DEBUG, debug_parser=PARSER_DEFAULT_DEBUG,
compile_mode="exec", compile_mode="exec",
is_pypy=IS_PYPY, is_pypy=IS_PYPY,
@@ -225,9 +243,9 @@ class SourceWalker(GenericASTTraversal, object):
mode that was used to create the Syntax Tree and specifies a mode that was used to create the Syntax Tree and specifies a
grammar variant within a Python version to use. grammar variant within a Python version to use.
`is_pypy' should be True if the Syntax Tree was generated for PyPy. `is_pypy` should be True if the Syntax Tree was generated for PyPy.
`linestarts' is a dictionary of line number to bytecode offset. This `linestarts` is a dictionary of line number to bytecode offset. This
can sometimes assist in determining which kind of source-code construct can sometimes assist in determining which kind of source-code construct
to use when there is ambiguity. to use when there is ambiguity.
@@ -244,9 +262,10 @@ class SourceWalker(GenericASTTraversal, object):
is_pypy=is_pypy, is_pypy=is_pypy,
) )
self.treeTransform = TreeTransform( # Initialize p_lambda on demand
version=version, show_ast=showast, is_pypy=is_pypy self.p_lambda = None
)
self.treeTransform = TreeTransform(version=self.version, show_ast=showast)
self.debug_parser = dict(debug_parser) self.debug_parser = dict(debug_parser)
self.showast = showast self.showast = showast
self.params = params self.params = params
@@ -286,25 +305,28 @@ class SourceWalker(GenericASTTraversal, object):
# An example is: # An example is:
# __module__ = __name__ # __module__ = __name__
self.hide_internal = True self.hide_internal = True
self.compile_mode = "exec" self.compile_mode = compile_mode
self.name = None self.name = None
self.version = version self.version = version
self.is_pypy = is_pypy self.is_pypy = is_pypy
customize_for_version(self, is_pypy, version) customize_for_version(self, is_pypy, version)
return return
def maybe_show_tree(self, ast): def maybe_show_tree(self, ast, phase):
if self.showast and self.treeTransform.showast: if self.showast.get("before", False):
self.println( self.println(
""" """
---- end before transform ---- end before transform
"""
)
if self.showast.get("after", False):
self.println(
"""
---- begin after transform ---- begin after transform
""" """
+ " " + " "
) )
if self.showast.get(phase, False):
if isinstance(self.showast, dict) and self.showast.get:
maybe_show_tree(self, ast) maybe_show_tree(self, ast)
def str_with_template(self, ast): def str_with_template(self, ast):
@@ -586,8 +608,10 @@ class SourceWalker(GenericASTTraversal, object):
self.prec = 6 self.prec = 6
# print("XXX", n.kind, p, "<", self.prec) # print("XXX", n.kind, p, "<", self.prec)
# print(self.f.getvalue())
if p < self.prec: if p < self.prec:
# print(f"PREC {p}, {node[0].kind}")
self.write("(") self.write("(")
self.preorder(node[0]) self.preorder(node[0])
self.write(")") self.write(")")
@@ -1111,8 +1135,8 @@ class SourceWalker(GenericASTTraversal, object):
ast = ast[0] ast = ast[0]
n = ast[iter_index] n = ast[iter_index]
assert n == "comp_iter", n
assert n == "comp_iter", n.kind
# Find the comprehension body. It is the inner-most # Find the comprehension body. It is the inner-most
# node that is not list_.. . # node that is not list_.. .
while n == "comp_iter": # list_iter while n == "comp_iter": # list_iter
@@ -1154,10 +1178,24 @@ class SourceWalker(GenericASTTraversal, object):
code_index = -6 code_index = -6
if self.version > (3, 6): if self.version > (3, 6):
# Python 3.7+ adds optional "come_froms" at node[0] # Python 3.7+ adds optional "come_froms" at node[0]
iter_index = 4 if node[0].kind in ("load_closure", "load_genexpr") and self.version >= (3, 8):
is_lambda = self.is_lambda
if node[0].kind == "load_genexpr":
self.is_lambda = False
self.closure_walk(node, collection_index=4)
self.is_lambda = is_lambda
else:
code_index = -6
if self.version < (3, 8):
iter_index = 4
else:
iter_index = 3
self.comprehension_walk(node, iter_index=iter_index, code_index=code_index)
pass
pass
else: else:
code_index = -5 code_index = -5
self.comprehension_walk(node, iter_index=iter_index, code_index=code_index) self.comprehension_walk(node, iter_index=iter_index, code_index=code_index)
self.write(")") self.write(")")
self.prune() self.prune()
@@ -1168,7 +1206,7 @@ class SourceWalker(GenericASTTraversal, object):
if node[0] in ["LOAD_SETCOMP", "LOAD_DICTCOMP"]: if node[0] in ["LOAD_SETCOMP", "LOAD_DICTCOMP"]:
self.comprehension_walk_newer(node, 1, 0) self.comprehension_walk_newer(node, 1, 0)
elif node[0].kind == "load_closure" and self.version >= (3, 0): elif node[0].kind == "load_closure" and self.version >= (3, 0):
self.setcomprehension_walk3(node, collection_index=4) self.closure_walk(node, collection_index=4)
else: else:
self.comprehension_walk(node, iter_index=4) self.comprehension_walk(node, iter_index=4)
self.write("}") self.write("}")
@@ -1180,15 +1218,19 @@ class SourceWalker(GenericASTTraversal, object):
"""Non-closure-based comprehensions the way they are done in Python3 """Non-closure-based comprehensions the way they are done in Python3
and some Python 2.7. Note: there are also other set comprehensions. and some Python 2.7. Note: there are also other set comprehensions.
""" """
# FIXME: DRY with listcomp_closure3
p = self.prec p = self.prec
self.prec = 27 self.prec = 27
code_obj = node[code_index].attr code_obj = node[code_index].attr
assert iscode(code_obj), node[code_index] assert iscode(code_obj), node[code_index]
self.debug_opts["asm"]
code = Code(code_obj, self.scanner, self.currentclass, self.debug_opts["asm"]) code = Code(code_obj, self.scanner, self.currentclass, self.debug_opts["asm"])
ast = self.build_ast(code._tokens, code._customize, code) ast = self.build_ast(
code._tokens, code._customize, code, is_lambda=self.is_lambda
)
self.customize(code._customize) self.customize(code._customize)
# skip over: sstmt, stmt, return, return_expr # skip over: sstmt, stmt, return, return_expr
@@ -1336,7 +1378,6 @@ class SourceWalker(GenericASTTraversal, object):
else: else:
self.preorder(store) self.preorder(store)
# FIXME this is all merely approximate
self.write(" in ") self.write(" in ")
self.preorder(node[in_node_index]) self.preorder(node[in_node_index])
@@ -1356,6 +1397,7 @@ class SourceWalker(GenericASTTraversal, object):
self.write(" if ") self.write(" if ")
if have_not: if have_not:
self.write("not ") self.write("not ")
pass
self.prec = 27 self.prec = 27
self.preorder(if_node) self.preorder(if_node)
pass pass
@@ -1375,32 +1417,86 @@ class SourceWalker(GenericASTTraversal, object):
self.write("]") self.write("]")
self.prune() self.prune()
def setcomprehension_walk3(self, node, collection_index): def get_comprehension_function(self, node, code_index):
"""Set comprehensions the way they are done in Python3. """
They're more other comprehensions, e.g. set comprehensions Build the body of a comprehension function and then
See if we can combine code. find the comprehension node buried in the tree which may
be surrounded with start-like symbols or dominiators,.
"""
self.prec = 27
code_node = node[code_index]
if code_node == "load_genexpr":
code_node = code_node[0]
code_obj = code_node.attr
assert iscode(code_obj), code_node
code = Code(code_obj, self.scanner, self.currentclass, self.debug_opts["asm"])
# FIXME: is there a way we can avoid this?
# The problem is that in filterint top-level list comprehensions we can
# encounter comprehensions of other kinds, and lambdas
if self.compile_mode in ("listcomp",): # add other comprehensions to this list
p_save = self.p
self.p = get_python_parser(
self.version, compile_mode="exec", is_pypy=self.is_pypy,
)
tree = self.build_ast(
code._tokens, code._customize, code, is_lambda=self.is_lambda
)
self.p = p_save
else:
tree = self.build_ast(
code._tokens, code._customize, code, is_lambda=self.is_lambda
)
self.customize(code._customize)
# skip over: sstmt, stmt, return, return_expr
# and other singleton derivations
if tree == "lambda_start":
if tree[0] in ("dom_start", "dom_start_opt"):
tree = tree[1]
while len(tree) == 1 or (
tree in ("stmt", "sstmt", "return", "return_expr", "return_expr_lambda")
):
self.prec = 100
tree = tree[0]
return tree
def closure_walk(self, node, collection_index):
"""Dictionary and comprehensions using closure the way they are done in Python3.
""" """
p = self.prec p = self.prec
self.prec = 27 self.prec = 27
code = Code(node[1].attr, self.scanner, self.currentclass) if node[0] == "load_genexpr":
ast = self.build_ast(code._tokens, code._customize, code) code_index = 0
self.customize(code._customize) else:
code_index = 1
tree = self.get_comprehension_function(node, code_index=code_index)
# Remove single reductions as in ("stmts", "sstmt"): # Remove single reductions as in ("stmts", "sstmt"):
while len(ast) == 1: while len(tree) == 1:
ast = ast[0] tree = tree[0]
store = ast[3] store = tree[3]
collection = node[collection_index] collection = node[collection_index]
n = ast[4] if tree == "genexpr_func_async":
iter_index = 3
else:
iter_index = 4
n = tree[iter_index]
list_if = None list_if = None
assert n == "comp_iter" assert n == "comp_iter"
# Find inner-most node. # Find inner-most node.
while n == "comp_iter": while n == "comp_iter":
n = n[0] # recurse one step n = n[0] # recurse one step
# FIXME: adjust for set comprehension # FIXME: adjust for set comprehension
if n == "list_for": if n == "list_for":
store = n[2] store = n[2]
@@ -1419,7 +1515,7 @@ class SourceWalker(GenericASTTraversal, object):
pass pass
pass pass
assert n == "comp_body", ast assert n == "comp_body", tree
self.preorder(n[0]) self.preorder(n[0])
self.write(" for ") self.write(" for ")
@@ -1813,6 +1909,7 @@ class SourceWalker(GenericASTTraversal, object):
self.kv_map(node[-1], sep, line_number, indent) self.kv_map(node[-1], sep, line_number, indent)
pass pass
pass
if sep.startswith(",\n"): if sep.startswith(",\n"):
self.write(sep[1:]) self.write(sep[1:])
if node[0] != "dict_entry": if node[0] != "dict_entry":
@@ -1874,6 +1971,7 @@ class SourceWalker(GenericASTTraversal, object):
self.write("(") self.write("(")
endchar = ")" endchar = ")"
else: else:
# from trepan.api import debug; debug()
raise TypeError( raise TypeError(
"Internal Error: n_build_list expects list, tuple, set, or unpack" "Internal Error: n_build_list expects list, tuple, set, or unpack"
) )
@@ -2044,23 +2142,22 @@ class SourceWalker(GenericASTTraversal, object):
index = entry[arg] index = entry[arg]
if isinstance(index, tuple): if isinstance(index, tuple):
if isinstance(index[1], str): if isinstance(index[1], str):
# if node[index[0]] != index[1]:
# from trepan.api import debug; debug()
assert node[index[0]] == index[1], ( assert node[index[0]] == index[1], (
"at %s[%d], expected '%s' node; got '%s'" "at %s[%d], expected '%s' node; got '%s'"
% (node.kind, arg, index[1], node[index[0]].kind) % (node.kind, arg, index[1], node[index[0]].kind,)
) )
else: else:
assert node[index[0]] in index[1], ( assert node[index[0]] in index[1], (
"at %s[%d], expected to be in '%s' node; got '%s'" "at %s[%d], expected to be in '%s' node; got '%s'"
% (node.kind, arg, index[1], node[index[0]].kind) % (node.kind, arg, index[1], node[index[0]].kind,)
) )
index = index[0] index = index[0]
assert isinstance( assert isinstance(index, int), (
index, int "at %s[%d], %s should be int or tuple"
), "at %s[%d], %s should be int or tuple" % ( % (node.kind, arg, type(index),)
node.kind,
arg,
type(index),
) )
try: try:
@@ -2082,10 +2179,17 @@ class SourceWalker(GenericASTTraversal, object):
assert isinstance(tup, tuple) assert isinstance(tup, tuple)
if len(tup) == 3: if len(tup) == 3:
(index, nonterm_name, self.prec) = tup (index, nonterm_name, self.prec) = tup
assert node[index] == nonterm_name, ( if isinstance(tup[1], str):
"at %s[%d], expected '%s' node; got '%s'" assert node[index] == nonterm_name, (
% (node.kind, arg, nonterm_name, node[index].kind) "at %s[%d], expected '%s' node; got '%s'"
) % (node.kind, arg, nonterm_name, node[index].kind,)
)
else:
assert node[tup[0]] in tup[1], (
"at %s[%d], expected to be in '%s' node; got '%s'"
% (node.kind, arg, index[1], node[index[0]].kind,)
)
else: else:
assert len(tup) == 2 assert len(tup) == 2
(index, self.prec) = entry[arg] (index, self.prec) = entry[arg]
@@ -2416,10 +2520,10 @@ class SourceWalker(GenericASTTraversal, object):
# print stmt[-1] # print stmt[-1]
# Add "global" declaration statements at the top
globals, nonlocals = find_globals_and_nonlocals( globals, nonlocals = find_globals_and_nonlocals(
ast, set(), set(), code, self.version ast, set(), set(), code, self.version
) )
# Add "global" declaration statements at the top
# of the function # of the function
for g in sorted(globals): for g in sorted(globals):
self.println(indent, "global ", g) self.println(indent, "global ", g)
@@ -2458,11 +2562,8 @@ class SourceWalker(GenericASTTraversal, object):
self.println(self.indent, "pass") self.println(self.indent, "pass")
else: else:
self.customize(customize) self.customize(customize)
if is_lambda: self.text = self.traverse(ast, is_lambda=is_lambda)
self.write(self.traverse(ast, is_lambda=is_lambda)) self.println(self.text)
else:
self.text = self.traverse(ast, is_lambda=is_lambda)
self.println(self.text)
self.name = old_name self.name = old_name
self.return_none = rn self.return_none = rn
@@ -2473,7 +2574,7 @@ class SourceWalker(GenericASTTraversal, object):
code, code,
is_lambda=False, is_lambda=False,
noneInNames=False, noneInNames=False,
isTopLevel=False, is_top_level_module=False,
): ):
# FIXME: DRY with fragments.py # FIXME: DRY with fragments.py
@@ -2500,10 +2601,10 @@ class SourceWalker(GenericASTTraversal, object):
raise ParserError(e, tokens, self.p.debug["reduce"]) raise ParserError(e, tokens, self.p.debug["reduce"])
except AssertionError, e: except AssertionError, e:
raise ParserError(e, tokens, self.p.debug["reduce"]) raise ParserError(e, tokens, self.p.debug["reduce"])
transform_ast = self.treeTransform.transform(ast, code) transform_tree = self.treeTransform.transform(ast, code)
self.maybe_show_tree(ast) self.maybe_show_tree(ast, phase="after")
del ast # Save memory del ast # Save memory
return transform_ast return transform_tree
# The bytecode for the end of the main routine has a # The bytecode for the end of the main routine has a
# "return None". However you can't issue a "return" statement in # "return None". However you can't issue a "return" statement in
@@ -2515,7 +2616,7 @@ class SourceWalker(GenericASTTraversal, object):
# Python 3.4's classes can add a "return None" which is # Python 3.4's classes can add a "return None" which is
# invalid syntax. # invalid syntax.
if tokens[-2].kind == "LOAD_CONST": if tokens[-2].kind == "LOAD_CONST":
if isTopLevel or tokens[-2].pattr is None: if is_top_level_module or tokens[-2].pattr is None:
del tokens[-2:] del tokens[-2:]
else: else:
tokens.append(Token("RETURN_LAST")) tokens.append(Token("RETURN_LAST"))
@@ -2540,12 +2641,12 @@ class SourceWalker(GenericASTTraversal, object):
checker(ast, False, self.ast_errors) checker(ast, False, self.ast_errors)
self.customize(customize) self.customize(customize)
transform_ast = self.treeTransform.transform(ast, code) transform_tree = self.treeTransform.transform(ast, code)
self.maybe_show_tree(ast) self.maybe_show_tree(ast, phase="before")
del ast # Save memory del ast # Save memory
return transform_ast return transform_tree
@classmethod @classmethod
def _get_mapping(cls, node): def _get_mapping(cls, node):
@@ -2573,16 +2674,13 @@ def code_deparse(
version = PYTHON_VERSION_TRIPLE version = PYTHON_VERSION_TRIPLE
# store final output stream for case of error # store final output stream for case of error
scanner = get_scanner(version, is_pypy=is_pypy) scanner = get_scanner(version, is_pypy=is_pypy, show_asm=debug_opts["asm"])
tokens, customize = scanner.ingest( tokens, customize = scanner.ingest(
co, code_objects=code_objects, show_asm=debug_opts["asm"] co, code_objects=code_objects, show_asm=debug_opts["asm"]
) )
debug_parser = dict(PARSER_DEFAULT_DEBUG) debug_parser = debug_opts.get("grammar", dict(PARSER_DEFAULT_DEBUG))
if debug_opts.get("grammar", None):
debug_parser["reduce"] = debug_opts["grammar"]
debug_parser["errorstack"] = "full"
# Build Syntax Tree from disassembly. # Build Syntax Tree from disassembly.
linestarts = dict(scanner.opc.findlinestarts(co)) linestarts = dict(scanner.opc.findlinestarts(co))
@@ -2590,27 +2688,49 @@ def code_deparse(
version, version,
out, out,
scanner, scanner,
showast=debug_opts.get("ast", None), showast=debug_opts.get("tree", TREE_DEFAULT_DEBUG),
debug_parser=debug_parser, debug_parser=debug_parser,
compile_mode=compile_mode, compile_mode=compile_mode,
is_pypy=is_pypy, is_pypy=is_pypy,
linestarts=linestarts, linestarts=linestarts,
) )
isTopLevel = co.co_name == "<module>" is_top_level_module = co.co_name == "<module>"
if compile_mode == "eval": if compile_mode == "eval":
deparsed.hide_internal = False deparsed.hide_internal = False
deparsed.ast = deparsed.build_ast(tokens, customize, co, isTopLevel=isTopLevel) deparsed.compile_mode = compile_mode
deparsed.ast = deparsed.build_ast(
tokens,
customize,
co,
is_lambda=(compile_mode == "lambda"),
is_top_level_module=is_top_level_module,
)
#### XXX workaround for profiling #### XXX workaround for profiling
if deparsed.ast is None: if deparsed.ast is None:
return None return None
if compile_mode != "eval": # FIXME use a lookup table here.
assert deparsed.ast == "stmts", "Should have parsed grammar start" if compile_mode == "lambda":
expected_start = "lambda_start"
elif compile_mode == "eval":
expected_start = "expr_start"
elif compile_mode == "expr":
expected_start = "expr_start"
elif compile_mode == "exec":
expected_start = "stmts"
elif compile_mode == "single":
expected_start = "single_start"
else: else:
assert deparsed.ast == "eval_expr", "Should have parsed grammar start" expected_start = None
if expected_start:
assert (
deparsed.ast == expected_start
), (
"Should have parsed grammar start to '%s'; got: %s" %
(expected_start, deparsed.ast.kind)
)
# save memory # save memory
del tokens del tokens
@@ -2652,7 +2772,11 @@ def code_deparse(
# What we've been waiting for: Generate source from Syntax Tree! # What we've been waiting for: Generate source from Syntax Tree!
deparsed.gen_source( deparsed.gen_source(
deparsed.ast, name=co.co_name, customize=customize, debug_opts=debug_opts deparsed.ast,
name=co.co_name,
customize=customize,
is_lambda=compile_mode == "lambda",
debug_opts=debug_opts,
) )
for g in sorted(deparsed.mod_globs): for g in sorted(deparsed.mod_globs):
@@ -2660,7 +2784,7 @@ def code_deparse(
if deparsed.ast_errors: if deparsed.ast_errors:
deparsed.write("# NOTE: have internal decompilation grammar errors.\n") deparsed.write("# NOTE: have internal decompilation grammar errors.\n")
deparsed.write("# Use -t option to show full context.") deparsed.write("# Use -T option to show full context.")
for err in deparsed.ast_errors: for err in deparsed.ast_errors:
deparsed.write(err) deparsed.write(err)
raise SourceWalkerError("Deparsing hit an internal grammar-rule bug") raise SourceWalkerError("Deparsing hit an internal grammar-rule bug")

View File

@@ -52,7 +52,7 @@ def maybe_show_tree(walker, ast):
stream = sys.stdout stream = sys.stdout
if ( if (
isinstance(walker.showast, dict) isinstance(walker.showast, dict)
and walker.showast.get("Full", False) and walker.showast.get("after", False)
and hasattr(walker, "str_with_template") and hasattr(walker, "str_with_template")
): ):
walker.str_with_template(ast) walker.str_with_template(ast)

View File

@@ -14,4 +14,4 @@
# This file is suitable for sourcing inside POSIX shell as # This file is suitable for sourcing inside POSIX shell as
# well as importing into Python # well as importing into Python
# fmt: off # fmt: off
__version__="3.8.1.dev0" # noqa __version__="3.9.0a1" # noqa