diff --git a/test/bytecode_3.8_run/05_long_literals.pyc b/test/bytecode_3.8_run/05_long_literals.pyc
index d599a738..665b3767 100644
Binary files a/test/bytecode_3.8_run/05_long_literals.pyc and b/test/bytecode_3.8_run/05_long_literals.pyc differ
diff --git a/test/simple_source/bug27+/01_argument_quoting.py b/test/simple_source/bug27+/01_argument_quoting.py
new file mode 100644
index 00000000..3defbb60
--- /dev/null
+++ b/test/simple_source/bug27+/01_argument_quoting.py
@@ -0,0 +1,7 @@
+# The bug was erroneously putting quotes around Exception on decompilation.
+# RUNNABLE!
+
+"""This program is self-checking!"""
+z = ["y", Exception]
+assert z[0] == "y"
+assert z[1] == Exception
diff --git a/test/simple_source/expression/05_long_literals.py b/test/simple_source/expression/05_long_literals.py
index 74804f09..24c4e3c6 100644
--- a/test/simple_source/expression/05_long_literals.py
+++ b/test/simple_source/expression/05_long_literals.py
@@ -725,3 +725,10 @@ values = {
 }
 
 assert sorted(values.values())[1:] == list(range(2, 34))
+
+
+# Check that we can distinguish names from strings in literal collections, e.g. lists.
+# The list has to have more than 4 items to get accumulated in a collection.
+a = ["y", 'Exception', "x", Exception, "z"]
+assert a[1] == "Exception"
+assert a[3] == Exception
diff --git a/test/stdlib/2.6-exclude.sh b/test/stdlib/2.6-exclude.sh
index 2ec213f8..657c6cf9 100644
--- a/test/stdlib/2.6-exclude.sh
+++ b/test/stdlib/2.6-exclude.sh
@@ -81,7 +81,7 @@ SKIP_TESTS=(
     [test_winreg.py]=1   # it fails on its own
     [test_winsound.py]=1 # it fails on its own
-    [test_zipimport_support.py]=1
+    [test_zipimport_support.py]=1 # expected test to raise ImportError
     [test_zipfile64.py]=1 # Skip Long test
     # .pyenv/versions/2.6.9/lib/python2.6/lib2to3/refactor.pyc
     # .pyenv/versions/2.6.9/lib/python2.6/pyclbr.pyc
diff --git a/test/stdlib/2.7-exclude.sh b/test/stdlib/2.7-exclude.sh
index 16a05547..89890d1c 100644
--- a/test/stdlib/2.7-exclude.sh
+++ b/test/stdlib/2.7-exclude.sh
@@ -22,25 +22,20 @@ SKIP_TESTS=(
     [test_doctest2.py]=1 # Fails on its own
     [test_format.py]=1 # Control flow "and" vs nested "if"
-    [test_float.py]=1
-    [test_grp.py]=1 # test takes to long, works interactively though
     [test_io.py]=1 # Test takes too long to run
-    [test_ioctl.py]=1 # Test takes too long to run
-    [test_lib2to3.py]=1 # test takes too long to run: 28 seconds
     [test_memoryio.py]=1 # FIX
     [test_multiprocessing.py]=1 # On uncompyle2, takes 24 secs
-    [test_poll.py]=1 # test takes too long to run: 11 seconds
     [test_regrtest.py]=1 #
     [test_runpy.py]=1 # Long and fails on its own
     [test_socket.py]=1 # Runs ok but takes 22 seconds
-    [test_ssl.py]=1 #
+    [test_ssl.py]=1 # Fails on its own
     [test_subprocess.py]=1 # Runs ok but takes 22 seconds
     [test_sys_settrace.py]=1 # Line numbers are expected to be different
     [test_traceback.py]=1 # Line numbers change - duh.
     [test_xpickle.py]=1 # Runs ok but takes 72 seconds
     [test_zipfile64.py]=1 # Runs ok but takes 204 seconds
-    [test_zipimport.py]=1 #
+    [test_zipimport.py]=1 # expected test to raise ImportError
 )
 
 # 334 unit-test files in about 15 minutes
diff --git a/uncompyle6/scanner.py b/uncompyle6/scanner.py
index da94d6b1..fd8b08b2 100644
--- a/uncompyle6/scanner.py
+++ b/uncompyle6/scanner.py
@@ -174,11 +174,11 @@ class Scanner(object):
                 has_extended_arg=False,
             )
         )
-        if tokens[j] == "LOAD_CONST":
-            opname = "ADD_VALUE"
-        else:
-            opname = "ADD_VALUE_VAR"
         for j in range(collection_start, i):
+            if tokens[j] == "LOAD_CONST":
+                opname = "ADD_VALUE"
+            else:
+                opname = "ADD_VALUE_VAR"
             new_tokens.append(
                 Token(
                     opname=opname,
diff --git a/uncompyle6/scanners/scanner2.py b/uncompyle6/scanners/scanner2.py
index 7fc6ba4f..44a5881b 100644
--- a/uncompyle6/scanners/scanner2.py
+++ b/uncompyle6/scanners/scanner2.py
@@ -321,7 +321,9 @@ class Scanner2(Scanner):
                 "BUILD_SET",
             ):
                 t = Token(
-                    op_name, oparg, pattr, offset, self.linestarts.get(offset, None), op, has_arg, self.opc
+                    op_name, oparg, pattr, offset,
+                    self.linestarts.get(offset, None),
+                    op, has_arg, self.opc
                 )
                 collection_type = op_name.split("_")[1]
                 next_tokens = self.bound_collection_from_tokens(
diff --git a/uncompyle6/scanners/tok.py b/uncompyle6/scanners/tok.py
index ace74a02..11327560 100644
--- a/uncompyle6/scanners/tok.py
+++ b/uncompyle6/scanners/tok.py
@@ -19,6 +19,7 @@
 import re
 import sys
 
 intern = sys.intern
+from typing import Union
 
 def off2int(offset, prefer_last=True):
@@ -60,7 +61,7 @@ class Token:
         opname,
         attr=None,
         pattr=None,
-        offset=-1,
+        offset: Union[int, str] = -1,
         linestart=None,
         op=None,
         has_arg=None,
diff --git a/uncompyle6/semantics/n_actions.py b/uncompyle6/semantics/n_actions.py
index c9e63bef..20c19900 100644
--- a/uncompyle6/semantics/n_actions.py
+++ b/uncompyle6/semantics/n_actions.py
@@ -783,7 +783,7 @@ class NonterminalActions:
     def n_import_from(self, node):
         relative_path_index = 0
         if self.version >= (2, 5):
-            if node[relative_path_index].attr > 0:
+            if node[relative_path_index].pattr > 0:
                 node[2].pattr = ("." * node[relative_path_index].attr) + node[2].pattr
             if self.version > (2, 7):
                 if isinstance(node[1].pattr, tuple):
diff --git a/uncompyle6/semantics/pysource.py b/uncompyle6/semantics/pysource.py
index 58f49ac4..fb2baeaf 100644
--- a/uncompyle6/semantics/pysource.py
+++ b/uncompyle6/semantics/pysource.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2015-2022 by Rocky Bernstein
+# Copyright (c) 2015-2023 by Rocky Bernstein
 # Copyright (c) 2005 by Dan Pascu
 # Copyright (c) 2000-2002 by hartmut Goebel
 # Copyright (c) 1999 John Aycock
@@ -131,8 +131,6 @@ Python.
 
 import sys
 
-IS_PYPY = "__pypy__" in sys.builtin_module_names
-
 from spark_parser import GenericASTTraversal
 from xdis import COMPILER_FLAG_BIT, iscode
 from xdis.version_info import PYTHON_VERSION_TRIPLE
@@ -143,7 +141,7 @@ from uncompyle6.parsers.treenode import SyntaxTree
 from uncompyle6.scanner import Code, get_scanner
 from uncompyle6.scanners.tok import Token
 from uncompyle6.semantics.check_ast import checker
-from uncompyle6.semantics.consts import (ASSIGN_DOC_STRING, ASSIGN_TUPLE_PARAM,
+from uncompyle6.semantics.consts import (ASSIGN_TUPLE_PARAM,
                                           INDENT_PER_LEVEL, LINE_LENGTH, MAP,
                                           MAP_DIRECT, NAME_MODULE, NONE, PASS,
                                           PRECEDENCE, RETURN_LOCALS,
@@ -178,6 +176,8 @@ PARSER_DEFAULT_DEBUG = {
     "dups": False,
 }
 
+IS_PYPY = "__pypy__" in sys.builtin_module_names
+
 TREE_DEFAULT_DEBUG = {"before": False, "after": False}
 
 DEFAULT_DEBUG_OPTS = {
@@ -978,7 +978,6 @@ class SourceWalker(GenericASTTraversal, NonterminalActions, ComprehensionMixin):
                 return result
             # return self.traverse(node[1])
         return "(" + name
-        raise Exception("Can't find tuple parameter " + name)
 
     def build_class(self, code):
         """Dump class definition, doc string and class body."""
@@ -1193,10 +1192,11 @@
             del ast  # Save memory
             return transform_tree
 
-        # The bytecode for the end of the main routine has a
-        # "return None". However, you can't issue a "return" statement in
-        # main. So as the old cigarette slogan goes: I'd rather switch (the token stream)
-        # than fight (with the grammar to not emit "return None").
+        # The bytecode for the end of the main routine has a "return
+        # None". However, you can't issue a "return" statement in
+        # main. So as the old cigarette slogan goes: I'd rather switch
+        # (the token stream) than fight (with the grammar to not emit
+        # "return None").
         if self.hide_internal:
             if len(tokens) >= 2 and not noneInNames:
                 if tokens[-1].kind in ("RETURN_VALUE", "RETURN_VALUE_LAMBDA"):
@@ -1257,6 +1257,7 @@ def code_deparse(
 
     assert iscode(co)
 
+
     if version is None:
         version = PYTHON_VERSION_TRIPLE
 
@@ -1328,16 +1329,11 @@ def code_deparse(
 
     assert not nonlocals
 
-    if version >= (3, 0):
-        load_op = "LOAD_STR"
-    else:
-        load_op = "LOAD_CONST"
-
    # convert leading '__doc__ = "..." into doc string
     try:
         stmts = deparsed.ast
-        first_stmt = stmts[0][0]
-        if version >= 3.6:
+        first_stmt = stmts[0]
+        if version >= (3, 6):
             if first_stmt[0] == "SETUP_ANNOTATIONS":
                 del stmts[0]
                 assert stmts[0] == "sstmt"
@@ -1345,13 +1341,13 @@
                 first_stmt = stmts[0][0]
                 pass
             pass
-        if first_stmt == ASSIGN_DOC_STRING(co.co_consts[0], load_op):
+        if first_stmt == "docstring":
             print_docstring(deparsed, "", co.co_consts[0])
             del stmts[0]
         if stmts[-1] == RETURN_NONE:
             stmts.pop()  # remove last node
             # todo: if empty, add 'pass'
-    except:
+    except Exception:
         pass
 
     deparsed.FUTURE_UNICODE_LITERALS = (