diff --git a/uncompyle6/main.py b/uncompyle6/main.py
index 7f5902c3..a745df15 100644
--- a/uncompyle6/main.py
+++ b/uncompyle6/main.py
@@ -50,7 +50,7 @@ def _get_outstream(outfile):

 def decompile(
     co,
-    bytecode_version = PYTHON_VERSION_TRIPLE,
+    bytecode_version=PYTHON_VERSION_TRIPLE,
     out=sys.stdout,
     showasm=None,
     showast={},
@@ -83,13 +83,13 @@ def decompile(
         s += "\n"
         real_out.write(s)

-    assert iscode(co), ("%s does not smell like code" % co)
+    assert iscode(co), "%s does not smell like code" % co

     co_pypy_str = "PyPy " if is_pypy else ""
     run_pypy_str = "PyPy " if IS_PYPY else ""
     sys_version_lines = sys.version.split("\n")
     if source_encoding:
-        write(f"# -*- coding: {source_encoding} -*-")
+        write("# -*- coding: %s -*-" % source_encoding)
     write(
         "# uncompyle6 version %s\n"
         "# %sPython bytecode version base %s%s\n# Decompiled from: %sPython %s"
@@ -103,9 +103,9 @@
         )
     )
     if co.co_filename:
-        write(f"# Embedded file name: {co.co_filename}")
+        write("# Embedded file name: %s" % co.co_filename)
     if timestamp:
-        write(f"# Compiled at: {datetime.datetime.fromtimestamp(timestamp)}")
+        write("# Compiled at: %s" % datetime.datetime.fromtimestamp(timestamp))
     if source_size:
         write("# Size of source mod 2**32: %d bytes" % source_size)

@@ -135,7 +135,7 @@
                 (line_no, deparsed.source_linemap[line_no] + header_count)
                 for line_no in sorted(deparsed.source_linemap.keys())
             ]
-            mapstream.write(f"\n\n# {linemap}\n")
+            mapstream.write("\n\n# %s\n" % linemap)
     else:
         if do_fragments:
             deparse_fn = code_deparse_fragments
@@ -163,11 +163,11 @@ def compile_file(source_path):
         basename = source_path

     if hasattr(sys, "pypy_version_info"):
-        bytecode_path = f"{basename}-pypy{version_tuple_to_str()}.pyc"
+        bytecode_path = "%s-pypy%s.pyc" % (basename, version_tuple_to_str())
     else:
-        bytecode_path = f"{basename}-{version_tuple_to_str()}.pyc"
+        bytecode_path = "%s-%s.pyc" % (basename, version_tuple_to_str())

-    print(f"compiling {source_path} to {bytecode_path}")
+    print("compiling %s to %s" % (source_path, bytecode_path))
     py_compile.compile(source_path, bytecode_path, "exec")
     return bytecode_path

@@ -271,7 +271,7 @@
         infile = os.path.join(in_base, filename)
         # print("XXX", infile)
         if not os.path.exists(infile):
-            sys.stderr.write(f"File '{infile}' doesn't exist. Skipped\n")
+            sys.stderr.write("File '%s' doesn't exist. Skipped\n" % infile)
             continue

         if do_linemaps:
@@ -319,11 +319,11 @@
                 ):
                     if e[0] != last_mod:
                         line = "=" * len(e[0])
-                        outstream.write(f"{line}\n{e[0]}\n{line}\n")
+                        outstream.write("%s\n%s\n%s\n" % (line, e[0], line))
                         last_mod = e[0]
                     info = offsets[e]
                     extract_info = d.extract_node_info(info)
-                    outstream.write(f"{info.node.format().strip()}" + "\n")
+                    outstream.write("%s" % info.node.format().strip() + "\n")
                     outstream.write(extract_info.selectedLine + "\n")
                     outstream.write(extract_info.markerLine + "\n\n")
                 pass
@@ -345,13 +345,15 @@
             sys.stdout.write("\n%s\n" % str(e))
             if str(e).startswith("Unsupported Python"):
                 sys.stdout.write("\n")
-                sys.stderr.write(f"\n# Unsupported bytecode in file {infile}\n# {e}\n")
+                sys.stderr.write(
+                    "\n# Unsupported bytecode in file %s\n# %s\n" % (infile, e)
+                )
             else:
                 if outfile:
                     outstream.close()
                     os.remove(outfile)
                 sys.stdout.write("\n")
-                sys.stderr.write(f"\nLast file: {infile} ")
+                sys.stderr.write("\nLast file: %s " % (infile))
                 raise

             # except:
diff --git a/uncompyle6/parsers/parse37base.py b/uncompyle6/parsers/parse37base.py
index b9639e97..a3942f5f 100644
--- a/uncompyle6/parsers/parse37base.py
+++ b/uncompyle6/parsers/parse37base.py
@@ -2,11 +2,10 @@
 """
 Python 3.7 base code. We keep non-custom-generated grammar rules out of this file.
 """
-from uncompyle6.parser import ParserError, PythonParser, nop_func
-from uncompyle6.parsers.treenode import SyntaxTree
 from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
 from spark_parser.spark import rule2str

+from uncompyle6.parser import ParserError, PythonParser, nop_func
 from uncompyle6.parsers.reducecheck import (
     and_invalid,
     ifelsestmt,
@@ -16,9 +15,10 @@ from uncompyle6.parsers.reducecheck import (
     or_check,
     testtrue,
     tryelsestmtl3,
-    while1stmt,
     while1elsestmt,
+    while1stmt,
 )
+from uncompyle6.parsers.treenode import SyntaxTree


 class Python37BaseParser(PythonParser):
@@ -54,7 +54,7 @@ class Python37BaseParser(PythonParser):
                         expr
                         call
                         CALL_FUNCTION_3
-          """
+        """
         # FIXME: I bet this can be simplified
         # look for next MAKE_FUNCTION
         for i in range(i + 1, len(tokens)):
@@ -104,7 +104,6 @@ class Python37BaseParser(PythonParser):

     # organization for this. For example, arrange organize by opcode base?
     def customize_grammar_rules(self, tokens, customize):
-        is_pypy = False

         # For a rough break out on the first word. This may
@@ -321,18 +320,24 @@ class Python37BaseParser(PythonParser):

            elif opname in ("BUILD_CONST_LIST", "BUILD_CONST_DICT", "BUILD_CONST_SET"):
                if opname == "BUILD_CONST_DICT":
-                    rule = """
+                    rule = (
+                        """
                       add_consts ::= ADD_VALUE*
                       const_list ::= COLLECTION_START add_consts %s
                       dict       ::= const_list
                       expr       ::= dict
-                    """ % opname
+                        """
+                        % opname
+                    )
                else:
-                    rule = """
+                    rule = (
+                        """
                       add_consts ::= ADD_VALUE*
                       const_list ::= COLLECTION_START add_consts %s
                       expr       ::= const_list
-                    """ % opname
+                        """
+                        % opname
+                    )
                self.addRule(rule, nop_func)

            elif opname_base == "BUILD_CONST_KEY_MAP":
@@ -348,7 +353,6 @@ class Python37BaseParser(PythonParser):
                self.addRule(rule, nop_func)

            elif opname_base in ("BUILD_MAP", "BUILD_MAP_UNPACK"):
-
                if opname == "BUILD_MAP_UNPACK":
                    self.addRule(
                        """
@@ -525,7 +529,6 @@ class Python37BaseParser(PythonParser):
                    "CALL_FUNCTION_VAR_KW",
                )
            ) or opname.startswith("CALL_FUNCTION_KW"):
-
                if opname == "CALL_FUNCTION" and token.attr == 1:
                    rule = """
                     expr ::= dict_comp
@@ -1259,12 +1262,11 @@ class Python37BaseParser(PythonParser):
                if fn:
                    return fn(self, lhs, n, rule, ast, tokens, first, last)
        except Exception:
-            import sys, traceback
+            import sys
+            import traceback

            print(
-                ("Exception in %s %s\n"
-                 + "rule: %s\n"
-                 + "offsets %s .. %s")
+                ("Exception in %s %s\n" + "rule: %s\n" + "offsets %s .. %s")
                % (
                    fn.__name__,
                    sys.exc_info()[1],
diff --git a/uncompyle6/scanner.py b/uncompyle6/scanner.py
index 0af8304c..e7cfa608 100644
--- a/uncompyle6/scanner.py
+++ b/uncompyle6/scanner.py
@@ -21,12 +21,10 @@ scanner/ingestion module. From here we call various version-specific
 scanners, e.g. for Python 2.7 or 3.4.
 """

+import sys
 from array import array
 from collections import namedtuple

-import sys
-from uncompyle6.scanners.tok import Token
-from xdis.version_info import IS_PYPY, version_tuple_to_str
 import xdis
 from xdis import (
     Bytecode,
@@ -36,6 +34,9 @@ from xdis import (
     instruction_size,
     next_offset,
 )
+from xdis.version_info import IS_PYPY, version_tuple_to_str
+
+from uncompyle6.scanners.tok import Token

 # The byte code versions we support.
 # Note: these all have to be tuples of 2 ints
@@ -80,6 +81,7 @@ CANONIC2VERSION["3.5.2"] = 3.5
 intern = sys.intern
 L65536 = 65536

+
 def long(num):
     return num

@@ -108,9 +110,6 @@ class Scanner:
         self.show_asm = show_asm
         self.is_pypy = is_pypy
-        # Temoorary initialization.
-        self.opc = ModuleType("uninitialized")
-
         if version[:2] in PYTHON_VERSIONS:
             v_str = "opcode_%s" % version_tuple_to_str(
                 version, start=0, end=2, delimiter=""
             )
@@ -130,9 +129,7 @@ class Scanner:
         # FIXME: This weird Python2 behavior is not Python3
         self.resetTokenClass()

-    def bound_collection_from_tokens(
-        self, tokens, t, i, collection_type
-    ):
+    def bound_collection_from_tokens(self, tokens, t, i, collection_type):
         count = t.attr
         assert isinstance(count, int)

@@ -334,7 +331,7 @@ class Scanner:
             else:
                 print("%i\t%s\t" % (i, self.opname[op]))

-    def first_instr(self, start: int, end: int, instr, target=None, exact=True):
+    def first_instr(self, start, end, instr, target=None, exact=True):
         """
         Find the first <instr> in the block from start to end.
         <instr> is any python bytecode instruction or a list of opcodes
@@ -622,8 +619,7 @@ def parse_fn_counts_30_35(argc):
     return ((argc & 0xFF), (argc >> 8) & 0xFF, annotate_count)


-def get_scanner(version: Union[str, tuple], is_pypy=False, show_asm=None) -> Scanner:
-
+def get_scanner(version, is_pypy=False, show_asm=None):
     # If version is a string, turn that into the corresponding float.
     if isinstance(version, str):
         if version not in canonic_python_version:
@@ -684,5 +680,6 @@ if __name__ == "__main__":
     # scanner = get_scanner('2.7.13', True)
     # scanner = get_scanner(sys.version[:5], False)
     from xdis.version_info import PYTHON_VERSION_TRIPLE
+
     scanner = get_scanner(PYTHON_VERSION_TRIPLE, IS_PYPY, True)
     tokens, customize = scanner.ingest(co, {}, show_asm="after")
diff --git a/uncompyle6/scanners/scanner2.py b/uncompyle6/scanners/scanner2.py
index 695f5fa5..fbd242f7 100644
--- a/uncompyle6/scanners/scanner2.py
+++ b/uncompyle6/scanners/scanner2.py
@@ -36,13 +36,13 @@ Finally we save token information.
 from __future__ import print_function

 from copy import copy
-
-from xdis import code2num, iscode, op_has_argument, instruction_size
-from xdis.bytecode import _get_const_info
-from uncompyle6.scanner import Scanner, Token
-
 from sys import intern

+from xdis import code2num, instruction_size, iscode, op_has_argument
+from xdis.bytecode import _get_const_info
+
+from uncompyle6.scanner import Scanner, Token
+

 class Scanner2(Scanner):
     def __init__(self, version, show_asm=None, is_pypy=False):
@@ -236,7 +236,6 @@ class Scanner2(Scanner):
         # 'LOAD_ASSERT' is used in assert statements.
         self.load_asserts = set()
         for i in self.op_range(0, codelen):
-
             # We need to detect the difference between:
             #   raise AssertionError
             #      and
@@ -328,9 +327,14 @@ class Scanner2(Scanner):
                 "BUILD_SET",
             ):
                 t = Token(
-                    op_name, oparg, pattr, offset,
+                    op_name,
+                    oparg,
+                    pattr,
+                    offset,
                     self.linestarts.get(offset, None),
-                    op, has_arg, self.opc
+                    op,
+                    has_arg,
+                    self.opc,
                 )
                 collection_type = op_name.split("_")[1]
                 next_tokens = self.bound_collection_from_tokens(
@@ -541,14 +545,17 @@ class Scanner2(Scanner):
         for s in stmt_list:
             if code[s] == self.opc.JUMP_ABSOLUTE and s not in pass_stmts:
                 target = self.get_target(s)
-                if target > s or (self.lines and self.lines[last_stmt].l_no == self.lines[s].l_no):
+                if target > s or (
+                    self.lines and self.lines[last_stmt].l_no == self.lines[s].l_no
+                ):
                     stmts.remove(s)
                     continue
                 j = self.prev[s]
                 while code[j] == self.opc.JUMP_ABSOLUTE:
                     j = self.prev[j]
                 if (
-                    self.version >= (2, 3) and self.opname_for_offset(j) == "LIST_APPEND"
+                    self.version >= (2, 3)
+                    and self.opname_for_offset(j) == "LIST_APPEND"
                 ):  # list comprehension
                     stmts.remove(s)
                     continue
@@ -925,7 +932,6 @@ class Scanner2(Scanner):

             # Is it an "and" inside an "if" or "while" block
             if op == self.opc.PJIF:
-
                 # Search for other POP_JUMP_IF_...'s targeting the
                 # same target, of the current POP_JUMP_... instruction,
                 # starting from current offset, and filter everything inside inner 'or'
@@ -1117,7 +1123,6 @@ class Scanner2(Scanner):

             # Is this a loop and not an "if" statement?
             if (if_end < pre_rtarget) and (pre[if_end] in self.setup_loop_targets):
-
                 if if_end > start:
                     return
                 else:
@@ -1467,11 +1472,12 @@ class Scanner2(Scanner):


 if __name__ == "__main__":
     import inspect
+
     from xdis.version_info import PYTHON_VERSION_TRIPLE

     co = inspect.currentframe().f_code
     tokens, customize = Scanner2(PYTHON_VERSION_TRIPLE).ingest(co)
     for t in tokens:
-       print(t)
+        print(t)
     pass
diff --git a/uncompyle6/scanners/scanner3.py b/uncompyle6/scanners/scanner3.py
index 46295765..3491a25c 100644
--- a/uncompyle6/scanners/scanner3.py
+++ b/uncompyle6/scanners/scanner3.py
@@ -35,20 +35,18 @@ Finally we save token information.

 from __future__ import print_function

-from xdis import iscode, instruction_size, Instruction
-from xdis.bytecode import _get_const_info
+import sys

-from uncompyle6.scanners.tok import Token
-from uncompyle6.scanner import parse_fn_counts_30_35
-from uncompyle6.util import get_code_name
 import xdis

 # Get all the opcodes into globals
 import xdis.opcodes.opcode_33 as op3
+from xdis import Instruction, instruction_size, iscode
+from xdis.bytecode import _get_const_info

-from uncompyle6.scanner import Scanner, CONST_COLLECTIONS
-
-import sys
+from uncompyle6.scanner import CONST_COLLECTIONS, Scanner, parse_fn_counts_30_35
+from uncompyle6.scanners.tok import Token
+from uncompyle6.util import get_code_name

 intern = sys.intern
@@ -261,7 +259,7 @@ class Scanner3(Scanner):
                 opname="COLLECTION_START",
                 attr=collection_enum,
                 pattr=collection_type,
-                offset= "%s_0" % start_offset,
+                offset="%s_0" % start_offset,
                 linestart=False,
                 has_arg=True,
                 has_extended_arg=False,
@@ -296,7 +294,8 @@ class Scanner3(Scanner):
         return new_tokens

     def bound_map_from_inst(
-        self, insts: list, next_tokens: list, inst: Instruction, t: Token, i: int):
+        self, insts: list, next_tokens: list, inst: Instruction, t: Token, i: int
+    ):
         """
         Try to a sequence of instruction that ends with a BUILD_MAP into
         a sequence that can be parsed much faster, but inserting the
@@ -379,9 +378,7 @@ class Scanner3(Scanner):
         )
         return new_tokens

-    def ingest(
-        self, co, classname=None, code_objects={}, show_asm=None
-    ):
+    def ingest(self, co, classname=None, code_objects={}, show_asm=None):
         """
         Create "tokens" the bytecode of an Python code object. Largely these
         are the opcode name, but in some cases that has been modified to make parsing
@@ -647,7 +644,9 @@ class Scanner3(Scanner):
                     )

                     pattr = "%s positional, %s keyword only, %s annotated" % (
-                        pos_args, name_pair_args, annotate_args
+                        pos_args,
+                        name_pair_args,
+                        annotate_args,
                     )

                     if name_pair_args > 0 and annotate_args > 0:
@@ -1542,10 +1541,10 @@ class Scanner3(Scanner):


 if __name__ == "__main__":
-    from xdis.version_info import PYTHON_VERSION_TRIPLE
-
     import inspect

+    from xdis.version_info import PYTHON_VERSION_TRIPLE
+
     co = inspect.currentframe().f_code
     tokens, customize = Scanner3(PYTHON_VERSION_TRIPLE).ingest(co)
diff --git a/uncompyle6/scanners/scanner37.py b/uncompyle6/scanners/scanner37.py
index 3206ba09..4a4f74b7 100644
--- a/uncompyle6/scanners/scanner37.py
+++ b/uncompyle6/scanners/scanner37.py
@@ -22,14 +22,13 @@ This sets up opcodes Python's 3.7 and calls a generalized
 scanner routine for Python 3.
 """

-from uncompyle6.scanner import CONST_COLLECTIONS
-from uncompyle6.scanners.tok import Token
-
-from uncompyle6.scanners.scanner37base import Scanner37Base
-
 # bytecode verification, verify(), uses JUMP_OPs from here
 from xdis.opcodes import opcode_37 as opc

+from uncompyle6.scanner import CONST_COLLECTIONS
+from uncompyle6.scanners.scanner37base import Scanner37Base
+from uncompyle6.scanners.tok import Token
+
 # bytecode verification, verify(), uses JUMP_OPS from here
 JUMP_OPs = opc.JUMP_OPS

@@ -193,4 +192,6 @@ if __name__ == "__main__":
         print(t.format())
         pass
     else:
-        print("Need to be Python 3.7 to demo; I am version %s." % version_tuple_to_str())
+        print(
+            "Need to be Python 3.7 to demo; I am version %s." % version_tuple_to_str()
+        )
diff --git a/uncompyle6/scanners/scanner37base.py b/uncompyle6/scanners/scanner37base.py
index ccbcec93..499538bd 100644
--- a/uncompyle6/scanners/scanner37base.py
+++ b/uncompyle6/scanners/scanner37base.py
@@ -29,18 +29,16 @@ For example:
 Finally we save token information.
 """

-from xdis import iscode, instruction_size, Instruction
-from xdis.bytecode import _get_const_info
+import sys

-from uncompyle6.scanner import Token
 import xdis

 # Get all the opcodes into globals
 import xdis.opcodes.opcode_37 as op3
+from xdis import Instruction, instruction_size, iscode
+from xdis.bytecode import _get_const_info

-from uncompyle6.scanner import Scanner
-
-import sys
+from uncompyle6.scanner import Scanner, Token

 globals().update(op3.opmap)
@@ -252,7 +250,6 @@ class Scanner37Base(Scanner):

         n = len(self.insts)
         for i, inst in enumerate(self.insts):
-
             # We need to detect the difference between:
             #   raise AssertionError
             #      and
@@ -282,7 +279,6 @@ class Scanner37Base(Scanner):
         # To simplify things we want to untangle this. We also
         # do this loop before we compute jump targets.
         for i, inst in enumerate(self.insts):
-
             # One artifact of the "too-small" operand problem, is that
             # some backward jumps, are turned into forward jumps to another
             # "extended arg" backward jump to the same location.
@@ -319,7 +315,6 @@ class Scanner37Base(Scanner):

         j = 0
         for i, inst in enumerate(self.insts):
-
             argval = inst.argval
             op = inst.opcode

@@ -707,9 +702,7 @@ class Scanner37Base(Scanner):
         # Finish filling the list for last statement
         slist += [codelen] * (codelen - len(slist))

-    def detect_control_flow(
-        self, offset, targets, inst_index
-    ):
+    def detect_control_flow(self, offset, targets, inst_index):
         """
         Detect type of block structures and their boundaries
         to fix optimized jumps in python2.3+
@@ -956,5 +949,7 @@ if __name__ == "__main__":
         for t in tokens:
             print(t)
     else:
-        print("Need to be Python 3.7 to demo; I am version %s." % version_tuple_to_str())
+        print(
+            "Need to be Python 3.7 to demo; I am version %s." % version_tuple_to_str()
+        )
     pass
diff --git a/uncompyle6/semantics/customize.py b/uncompyle6/semantics/customize.py
index c708ca37..e2bb3cda 100644
--- a/uncompyle6/semantics/customize.py
+++ b/uncompyle6/semantics/customize.py
@@ -17,15 +17,15 @@
 """

 from uncompyle6.parsers.treenode import SyntaxTree
+from uncompyle6.scanners.tok import Token
 from uncompyle6.semantics.consts import (
     INDENT_PER_LEVEL,
     NO_PARENTHESIS_EVER,
     PRECEDENCE,
-    TABLE_R,
     TABLE_DIRECT,
+    TABLE_R,
 )
 from uncompyle6.semantics.helper import flatten_list
-from uncompyle6.scanners.tok import Token


 def customize_for_version(self, is_pypy, version):
@@ -87,7 +87,7 @@ def customize_for_version(self, is_pypy, version):
                 if line_number != self.line_number:
                     sep += "\n" + self.indent + INDENT_PER_LEVEL[:-1]
                     pass
-                self.write("%s%s" (sep, value))
+                self.write("%s%s" % (sep, value))
                 sep = ", "

             assert n >= len(kwargs_names)
diff --git a/uncompyle6/semantics/customize38.py b/uncompyle6/semantics/customize38.py
index dc3b1d31..36f89a20 100644
--- a/uncompyle6/semantics/customize38.py
+++ b/uncompyle6/semantics/customize38.py
@@ -23,8 +23,8 @@
 from uncompyle6.semantics.consts import PRECEDENCE, TABLE_DIRECT
 from uncompyle6.semantics.customize37 import FSTRING_CONVERSION_MAP
 from uncompyle6.semantics.helper import escape_string, strip_quotes

-def customize_for_version38(self, version):
+def customize_for_version38(self, version):
     # FIXME: pytest doesn't add proper keys in testing. Reinstate after we have fixed pytest.
     #  for lhs in 'for forelsestmt forelselaststmt '
     #             'forelselaststmtc tryfinally38'.split():
@@ -40,10 +40,10 @@ def customize_for_version38(self, version):
             ),
             "async_forelse_stmt38": (
                 "%|async for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n",
-                (7, 'store'),
-                (0, 'expr'),
-                (8, 'for_block'),
-                (-1, 'else_suite')
+                (7, "store"),
+                (0, "expr"),
+                (8, "for_block"),
+                (-1, "else_suite"),
             ),
             "async_with_stmt38": (
                 "%|async with %c:\n%+%c%-\n",
@@ -70,8 +70,15 @@ def customize_for_version38(self, version):
             ),
             # Python 3.8 reverses the order of keys and items
             # from all prior versions of Python.
-            "dict_comp_body": ("%c: %c", (0, "expr"), (1, "expr"),),
-            "except_cond1a": ("%|except %c:\n", (1, "expr"),),
+            "dict_comp_body": (
+                "%c: %c",
+                (0, "expr"),
+                (1, "expr"),
+            ),
+            "except_cond1a": (
+                "%|except %c:\n",
+                (1, "expr"),
+            ),
             "except_cond_as": (
                 "%|except %c as %c:\n",
                 (1, "expr"),
@@ -124,7 +131,11 @@ def customize_for_version38(self, version):
             "pop_return": ("%|return %c\n", (1, "return_expr")),
             "popb_return": ("%|return %c\n", (0, "return_expr")),
             "pop_ex_return": ("%|return %c\n", (0, "return_expr")),
-            "set_for": (" for %c in %c", (2, "store"), (0, "expr_or_arg"),),
+            "set_for": (
+                " for %c in %c",
+                (2, "store"),
+                (0, "expr_or_arg"),
+            ),
             "whilestmt38": (
                 "%|while %c:\n%+%c%-\n\n",
                 (1, ("bool_op", "testexpr", "testexprc")),
@@ -322,7 +333,9 @@ def customize_for_version38(self, version):
         f_conversion = self.traverse(formatted_value, indent="")
         # Remove leaving "f" and quotes
         conversion = strip_quotes(f_conversion[1:])
-        f_str = "f%s" % escape_string(("%s%s" % (value_equal, conversion)) + post_str)
+        f_str = "f%s" % escape_string(
+            ("%s%s" % (value_equal, conversion)) + post_str
+        )

         self.write(f_str)
         self.in_format_string = old_in_format_string
diff --git a/uncompyle6/semantics/make_function1.py b/uncompyle6/semantics/make_function1.py
index 4b47e1eb..7abb6368 100644
--- a/uncompyle6/semantics/make_function1.py
+++ b/uncompyle6/semantics/make_function1.py
@@ -17,16 +17,18 @@ All the crazy things we have to do to handle Python functions in Python before
 3.0. The saga of changes continues in 3.0 and above and in other files.
 """

-from uncompyle6.scanner import Code
-from uncompyle6.semantics.parser_error import ParserError
+from xdis import iscode
+
 from uncompyle6.parser import ParserError as ParserError2
+from uncompyle6.scanner import Code
 from uncompyle6.semantics.helper import (
-    print_docstring,
     find_all_globals,
     find_globals_and_nonlocals,
     find_none,
+    print_docstring,
 )
-from xdis import iscode
+from uncompyle6.semantics.parser_error import ParserError
+

 def make_function1(self, node, is_lambda, nested=1, code_node=None):
     """
@@ -36,8 +38,8 @@ def make_function1(self, node, is_lambda, nested=1, code_node=None):

     def build_param(tree, param_names: list) -> tuple:
         """build parameters:
-            - handle defaults
-            - handle format tuple parameters
+        - handle defaults
+        - handle format tuple parameters
         """
         # if formal parameter is a tuple, the parameter name
         # starts with a dot (eg. '.1', '.2')
@@ -186,5 +188,5 @@ def make_function1(self, node, is_lambda, nested=1, code_node=None):
         tree, code.co_name, code._customize, is_lambda=is_lambda, returnNone=rn
     )

-    code._tokens = None # save memory
+    code._tokens = None  # save memory
     code._customize = None  # save memory
diff --git a/uncompyle6/semantics/n_actions.py b/uncompyle6/semantics/n_actions.py
index bf364a46..27df0557 100644
--- a/uncompyle6/semantics/n_actions.py
+++ b/uncompyle6/semantics/n_actions.py
@@ -16,22 +16,12 @@ Custom Nonterminal action functions.
 See NonterminalActions docstring.
 """

-from uncompyle6.semantics.consts import (
-    INDENT_PER_LEVEL,
-    NONE,
-    PRECEDENCE,
-    minint,
-)
-
 from uncompyle6.parsers.treenode import SyntaxTree
 from uncompyle6.scanners.tok import Token
+from uncompyle6.semantics.consts import INDENT_PER_LEVEL, NONE, PRECEDENCE, minint
+from uncompyle6.semantics.helper import find_code_node, flatten_list
 from uncompyle6.util import better_repr, get_code_name

-from uncompyle6.semantics.helper import (
-    find_code_node,
-    flatten_list,
-)
-

 class NonterminalActions:
     """
@@ -227,8 +217,10 @@ class NonterminalActions:
         else:
             # from trepan.api import debug; debug()
             raise TypeError(
-                ("Internal Error: n_const_list expects dict, list set, or set; got %s"
-                 % lastnodetype)
+                (
+                    "Internal Error: n_const_list expects dict, list set, or set; got %s"
+                    % lastnodetype
+                )
             )

         self.indent_more(INDENT_PER_LEVEL)
diff --git a/uncompyle6/semantics/pysource.py b/uncompyle6/semantics/pysource.py
index 605db2d7..cd57ca59 100644
--- a/uncompyle6/semantics/pysource.py
+++ b/uncompyle6/semantics/pysource.py
@@ -773,7 +773,8 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
                     """
                     Expanding '%s' in template '%s[%s]':
                     %s is invalid; has only %d entries
-                    """ % (node.kind, entry, arg, index, len(node))
+                    """
+                    % (node.kind, entry, arg, index, len(node))
                 )
             self.preorder(node[index])
@@ -1343,10 +1344,10 @@ def code_deparse(
     if expected_start:
         assert (
             deparsed.ast == expected_start
-        ), (
-            "Should have parsed grammar start to '%s'; got: %s" %
-            (expected_start, deparsed.ast.kind)
-        )
+        ), "Should have parsed grammar start to '%s'; got: %s" % (
+            expected_start,
+            deparsed.ast.kind,
+        )

     # save memory
     del tokens