diff --git a/test/bytecode_2.7_run/05_long_literals.pyc b/test/bytecode_2.7_run/05_long_literals.pyc index d5d998e6..21d4540e 100644 Binary files a/test/bytecode_2.7_run/05_long_literals.pyc and b/test/bytecode_2.7_run/05_long_literals.pyc differ diff --git a/test/bytecode_3.8_run/05_long_literals.pyc b/test/bytecode_3.8_run/05_long_literals.pyc index d599a738..665b3767 100644 Binary files a/test/bytecode_3.8_run/05_long_literals.pyc and b/test/bytecode_3.8_run/05_long_literals.pyc differ diff --git a/test/simple_source/bug27+/01_argument_quoting.py b/test/simple_source/bug27+/01_argument_quoting.py new file mode 100644 index 00000000..3defbb60 --- /dev/null +++ b/test/simple_source/bug27+/01_argument_quoting.py @@ -0,0 +1,7 @@ +# Bug was erroneously putting quotes around Exception on decompilation +# RUNNABLE! + +"""This program is self-checking!""" +z = ["y", Exception] +assert z[0] == "y" +assert z[1] == Exception diff --git a/test/simple_source/expression/05_long_literals.py b/test/simple_source/expression/05_long_literals.py index 74804f09..24c4e3c6 100644 --- a/test/simple_source/expression/05_long_literals.py +++ b/test/simple_source/expression/05_long_literals.py @@ -725,3 +725,10 @@ values = { } assert sorted(values.values())[1:] == list(range(2, 34)) + + +# Check that we can distinguish names from strings in literal collections, e.g. lists. 
+# The list has to have more than 4 items to get accumulated in a collection +a = ["y", 'Exception', "x", Exception, "z"] +assert a[1] == "Exception" +assert a[3] == Exception diff --git a/uncompyle6/scanner.py b/uncompyle6/scanner.py index a4b5bafc..052f10e9 100644 --- a/uncompyle6/scanner.py +++ b/uncompyle6/scanner.py @@ -175,11 +175,11 @@ class Scanner(object): has_extended_arg=False, ) ) - if tokens[j] == "LOAD_CONST": - opname = "ADD_VALUE" - else: - opname = "ADD_VALUE_VAR" for j in range(collection_start, i): + if tokens[j] == "LOAD_CONST": + opname = "ADD_VALUE" + else: + opname = "ADD_VALUE_VAR" new_tokens.append( Token( opname=opname, diff --git a/uncompyle6/scanners/scanner2.py b/uncompyle6/scanners/scanner2.py index 149a95e5..2484a764 100644 --- a/uncompyle6/scanners/scanner2.py +++ b/uncompyle6/scanners/scanner2.py @@ -316,7 +316,9 @@ class Scanner2(Scanner): "BUILD_SET", ): t = Token( - op_name, oparg, pattr, offset, self.linestarts.get(offset, None), op, has_arg, self.opc + op_name, oparg, pattr, offset, + self.linestarts.get(offset, None), + op, has_arg, self.opc ) collection_type = op_name.split("_")[1] next_tokens = self.bound_collection_from_tokens( diff --git a/uncompyle6/semantics/n_actions.py b/uncompyle6/semantics/n_actions.py index 88ac0665..c2f3cb18 100644 --- a/uncompyle6/semantics/n_actions.py +++ b/uncompyle6/semantics/n_actions.py @@ -786,7 +786,7 @@ class NonterminalActions: def n_import_from(self, node): relative_path_index = 0 if self.version >= (2, 5): - if node[relative_path_index].attr > 0: + if node[relative_path_index].pattr > 0: node[2].pattr = ("." 
* node[relative_path_index].attr) + node[2].pattr if self.version > (2, 7): if isinstance(node[1].pattr, tuple): diff --git a/uncompyle6/semantics/pysource.py b/uncompyle6/semantics/pysource.py index d2bb5f4e..c8a8b9d1 100644 --- a/uncompyle6/semantics/pysource.py +++ b/uncompyle6/semantics/pysource.py @@ -1,4 +1,4 @@ -# Copyright (c) 2015-2022 by Rocky Bernstein +# Copyright (c) 2015-2023 by Rocky Bernstein # Copyright (c) 2005 by Dan Pascu # Copyright (c) 2000-2002 by hartmut Goebel # Copyright (c) 1999 John Aycock @@ -131,8 +131,6 @@ Python. import sys -IS_PYPY = "__pypy__" in sys.builtin_module_names - from spark_parser import GenericASTTraversal from xdis import COMPILER_FLAG_BIT, iscode from xdis.version_info import PYTHON_VERSION_TRIPLE @@ -143,7 +141,7 @@ from uncompyle6.parsers.treenode import SyntaxTree from uncompyle6.scanner import Code, get_scanner from uncompyle6.scanners.tok import Token from uncompyle6.semantics.check_ast import checker -from uncompyle6.semantics.consts import (ASSIGN_DOC_STRING, ASSIGN_TUPLE_PARAM, +from uncompyle6.semantics.consts import (ASSIGN_TUPLE_PARAM, INDENT_PER_LEVEL, LINE_LENGTH, MAP, MAP_DIRECT, NAME_MODULE, NONE, PASS, PRECEDENCE, RETURN_LOCALS, @@ -190,6 +188,8 @@ PARSER_DEFAULT_DEBUG = { "dups": False, } +IS_PYPY = "__pypy__" in sys.builtin_module_names + TREE_DEFAULT_DEBUG = {"before": False, "after": False} DEFAULT_DEBUG_OPTS = { @@ -990,7 +990,6 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin): return result # return self.traverse(node[1]) return "(" + name - raise Exception("Can't find tuple parameter " + name) def build_class(self, code): """Dump class definition, doc string and class body.""" @@ -1206,10 +1205,11 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin): del ast # Save memory return transform_tree - # The bytecode for the end of the main routine has a - # "return None". However, you can't issue a "return" statement in - # main. 
So as the old cigarette slogan goes: I'd rather switch (the token stream) - # than fight (with the grammar to not emit "return None"). + # The bytecode for the end of the main routine has a "return + # None". However, you can't issue a "return" statement in + # main. So as the old cigarette slogan goes: I'd rather switch + # (the token stream) than fight (with the grammar to not emit + # "return None"). if self.hide_internal: if len(tokens) >= 2 and not noneInNames: if tokens[-1].kind in ("RETURN_VALUE", "RETURN_VALUE_LAMBDA"): @@ -1270,6 +1270,7 @@ def code_deparse( assert iscode(co) + if version is None: version = PYTHON_VERSION_TRIPLE @@ -1341,16 +1342,11 @@ def code_deparse( assert not nonlocals - if version >= (3, 0): - load_op = "LOAD_STR" - else: - load_op = "LOAD_CONST" - # convert leading '__doc__ = "..." into doc string try: stmts = deparsed.ast - first_stmt = stmts[0][0] - if (version >= (3, 6, 0)): + first_stmt = stmts[0] + if version >= (3, 6): if first_stmt[0] == "SETUP_ANNOTATIONS": del stmts[0] assert stmts[0] == "sstmt" @@ -1358,13 +1354,13 @@ def code_deparse( first_stmt = stmts[0][0] pass pass - if first_stmt == ASSIGN_DOC_STRING(co.co_consts[0], load_op): + if first_stmt == "docstring": print_docstring(deparsed, "", co.co_consts[0]) del stmts[0] if stmts[-1] == RETURN_NONE: stmts.pop() # remove last node # todo: if empty, add 'pass' - except: + except Exception: pass deparsed.FUTURE_UNICODE_LITERALS = (