From 4b296e1ead4d44c87447464717f548c6300a35b5 Mon Sep 17 00:00:00 2001 From: rocky Date: Fri, 30 Jun 2023 15:43:27 -0400 Subject: [PATCH 01/12] Correct generator function parsing for 3.3..3.5 --- admin-tools/setup-master.sh | 2 +- uncompyle6/bin/uncompile.py | 1 - uncompyle6/parsers/parse33.py | 2 +- uncompyle6/parsers/parse34.py | 2 ++ uncompyle6/parsers/parse36.py | 3 --- uncompyle6/scanners/scanner3.py | 18 ++++++++++------ uncompyle6/semantics/gencomp.py | 8 ++++--- uncompyle6/semantics/n_actions.py | 36 ++++++++++++++++++++----------- 8 files changed, 44 insertions(+), 28 deletions(-) diff --git a/admin-tools/setup-master.sh b/admin-tools/setup-master.sh index 9e00863c..181f857e 100755 --- a/admin-tools/setup-master.sh +++ b/admin-tools/setup-master.sh @@ -1,5 +1,5 @@ #!/bin/bash -PYTHON_VERSION=3.7.16 +PYTHON_VERSION=3.8.17 function checkout_version { local repo=$1 diff --git a/uncompyle6/bin/uncompile.py b/uncompyle6/bin/uncompile.py index c026db88..28f571f8 100755 --- a/uncompyle6/bin/uncompile.py +++ b/uncompyle6/bin/uncompile.py @@ -80,7 +80,6 @@ def usage(): def main_bin(): - current_bytecode_supported = True recurse_dirs = False numproc = 0 outfile = "-" diff --git a/uncompyle6/parsers/parse33.py b/uncompyle6/parsers/parse33.py index 2a4a4ef2..55432e72 100644 --- a/uncompyle6/parsers/parse33.py +++ b/uncompyle6/parsers/parse33.py @@ -15,12 +15,12 @@ class Python33Parser(Python32Parser): # Python 3.3+ adds yield from. expr ::= yield_from yield_from ::= expr expr YIELD_FROM + stmt ::= genexpr_func """ def customize_grammar_rules(self, tokens, customize): self.remove_rules(""" # 3.3+ adds POP_BLOCKS - genexpr_func ::= LOAD_ARG FOR_ITER store comp_iter JUMP_BACK whileTruestmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK POP_BLOCK NOP COME_FROM_LOOP whileTruestmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK NOP COME_FROM_LOOP """) diff --git a/uncompyle6/parsers/parse34.py b/uncompyle6/parsers/parse34.py index 0ff381fb..39b04082 100644 --- a/uncompyle6/parsers/parse34.py +++ b/uncompyle6/parsers/parse34.py @@ -52,6 +52,8 @@ class Python34Parser(Python33Parser): yield_from ::= expr GET_ITER LOAD_CONST YIELD_FROM _ifstmts_jump ::= c_stmts_opt JUMP_ABSOLUTE JUMP_FORWARD COME_FROM + + genexpr_func ::= LOAD_ARG _come_froms FOR_ITER store comp_iter JUMP_BACK """ def customize_grammar_rules(self, tokens, customize): diff --git a/uncompyle6/parsers/parse36.py b/uncompyle6/parsers/parse36.py index 36d65c3c..fd79035f 100644 --- a/uncompyle6/parsers/parse36.py +++ b/uncompyle6/parsers/parse36.py @@ -191,9 +191,6 @@ class Python36Parser(Python35Parser): COME_FROM_FINALLY compare_chained2 ::= expr COMPARE_OP come_froms JUMP_FORWARD - - stmt ::= genexpr_func - genexpr_func ::= LOAD_ARG _come_froms FOR_ITER store comp_iter JUMP_BACK """ # Some of this is duplicated from parse37. 
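For readers following the grammar change above: the genexpr_func rule describes the bytecode body of a generator expression's <genexpr> code object. A minimal way to produce and decompile such bytecode, sketched with illustrative file names (the exact cache tag depends on which interpreter byte-compiles the file):

# genexpr_sample.py -- "squares" creates a <genexpr> code object whose body
# (LOAD_ARG ... FOR_ITER ... store ... comp_iter ... JUMP_BACK) is what the
# genexpr_func rule is meant to match.
squares = (n * n for n in range(10))
print(list(squares))

# Byte-compile with a 3.3-3.5 interpreter and decompile the cached .pyc, e.g.:
#   python3.4 -m py_compile genexpr_sample.py
#   uncompyle6 __pycache__/genexpr_sample.cpython-34.pyc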
Eventually we'll probably rebase from diff --git a/uncompyle6/scanners/scanner3.py b/uncompyle6/scanners/scanner3.py index 1cf060e0..30539a7d 100644 --- a/uncompyle6/scanners/scanner3.py +++ b/uncompyle6/scanners/scanner3.py @@ -39,6 +39,7 @@ from typing import Optional, Tuple from xdis import iscode, instruction_size, Instruction from xdis.bytecode import _get_const_info +from xdis.codetype import UnicodeForPython3 from uncompyle6.scanners.tok import Token from uncompyle6.scanner import parse_fn_counts_30_35 @@ -572,16 +573,19 @@ class Scanner3(Scanner): if op in self.opc.CONST_OPS: const = argval if iscode(const): - if const.co_name == "<lambda>": + co_name = const.co_name + if isinstance(const.co_name, UnicodeForPython3): + co_name = const.co_name.value.decode("utf-8") + if co_name == "<lambda>": assert opname == "LOAD_CONST" opname = "LOAD_LAMBDA" - elif const.co_name == "<genexpr>": + elif co_name == "<genexpr>": opname = "LOAD_GENEXPR" - elif const.co_name == "<dictcomp>": + elif co_name == "<dictcomp>": opname = "LOAD_DICTCOMP" - elif const.co_name == "<setcomp>": + elif co_name == "<setcomp>": opname = "LOAD_SETCOMP" - elif const.co_name == "<listcomp>": + elif co_name == "<listcomp>": opname = "LOAD_LISTCOMP" else: opname = "LOAD_CODE" @@ -589,8 +593,8 @@ class Scanner3(Scanner): # now holds Code(const) and thus can not be used # for comparison (todo: think about changing this) # pattr = 'code_object @ 0x%x %s->%s' %\ - # (id(const), const.co_filename, const.co_name) - pattr = "<code_object " + const.co_name + ">" + # (id(const), const.co_filename, co_name) + pattr = "<code_object " + co_name + ">" elif isinstance(const, str): opname = "LOAD_STR" else: diff --git a/uncompyle6/semantics/gencomp.py b/uncompyle6/semantics/gencomp.py index c1018c90..936b19e2 100644 --- a/uncompyle6/semantics/gencomp.py +++ b/uncompyle6/semantics/gencomp.py @@ -174,9 +174,11 @@ class ComprehensionMixin: tree = tree[1] pass - if tree in ("genexpr_func_async",): - if tree[3] == "comp_iter": - iter_index = 3 + if tree in ("genexpr_func", "genexpr_func_async",): + for i in range(3, 5): + if tree[i] == "comp_iter": + iter_index = i + break n = tree[iter_index] diff --git a/uncompyle6/semantics/n_actions.py b/uncompyle6/semantics/n_actions.py index 44cf12c7..d61cf517 100644 --- a/uncompyle6/semantics/n_actions.py +++ b/uncompyle6/semantics/n_actions.py @@ -146,7 +146,6 @@ class NonterminalActions: self.prune() # stop recursing def n_classdef(self, node): - if self.version >= (3, 6): self.n_classdef36(node) elif self.version >= (3, 0): @@ -228,8 +227,10 @@ class NonterminalActions: else: # from trepan.api import debug; debug() raise TypeError( - ("Internal Error: n_const_list expects dict, list set, or set; got " - f"{lastnodetype}") + ( + "Internal Error: n_const_list expects dict, list set, or set; got " + f"{lastnodetype}" + ) ) self.indent_more(INDENT_PER_LEVEL) @@ -521,7 +522,6 @@ class NonterminalActions: self.prune() def n_docstring(self, node): - indent = self.indent doc_node = node[0] if doc_node.attr: @@ -543,7 +543,7 @@ class NonterminalActions: self.write(indent) docstring = repr(docstring.expandtabs())[1:-1] - for (orig, replace) in ( + for orig, replace in ( ("\\\\", "\t"), ("\\r\\n", "\n"), ("\\n", "\n"), @@ -701,8 +701,11 @@ class NonterminalActions: self.write("(") iter_index = 3 if self.version > (3, 2): - if self.version >= (3, 6): - if node[0].kind in ("load_closure", "load_genexpr") and self.version >= (3, 8): + if self.version >= (3, 4): + if node[0].kind in ( + "load_closure", + "load_genexpr", + ) and self.version >= (3, 8): code_index = -6 is_lambda = self.is_lambda if node[0].kind == "load_genexpr": @@ -710,13 +713,20 @@ class
NonterminalActions: self.closure_walk(node, collection_index=4) self.is_lambda = is_lambda else: - # Python 3.7+ adds optional "come_froms" at node[0] so count from the end + # Python 3.7+ adds optional "come_froms" at node[0] so count from + # the end. if node == "generator_exp_async" and self.version[:2] == (3, 6): code_index = 0 else: code_index = -6 - iter_index = 4 if self.version < (3, 8) else 3 - self.comprehension_walk(node, iter_index=iter_index, code_index=code_index) + iter_index = ( + 4 + if self.version < (3, 8) and not isinstance(node[4], Token) + else 3 + ) + self.comprehension_walk( + node, iter_index=iter_index, code_index=code_index + ) pass pass else: @@ -1028,7 +1038,6 @@ class NonterminalActions: self.prune() def n_mkfunc(self, node): - code_node = find_code_node(node, -2) code = code_node.attr self.write(code.co_name) @@ -1076,7 +1085,10 @@ class NonterminalActions: else: # We can't comment out like above because there may be a trailing ')' # that needs to be written - assert len(node) == 3 and node[2] in ("RETURN_VALUE_LAMBDA", "LAMBDA_MARKER") + assert len(node) == 3 and node[2] in ( + "RETURN_VALUE_LAMBDA", + "LAMBDA_MARKER", + ) self.preorder(node[0]) self.prune() From 3f21b2a1155f6889053812302878d881c2eacb54 Mon Sep 17 00:00:00 2001 From: "R. Bernstein" Date: Fri, 30 Jun 2023 16:34:55 -0400 Subject: [PATCH 02/12] Update build to large resource class in config.yml --- .circleci/config.yml | 111 ++++++++++++++++++++++--------------------- 1 file changed, 57 insertions(+), 54 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 2ab04f44..ad879b5a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -18,60 +18,63 @@ jobs: docker: - image: circleci/python:3.6.9 steps: - # Machine Setup - # If you break your build into multiple jobs with workflows, you will probably want to do the parts of this that are relevant in each - # The following `checkout` command checks out your code to your working directory. In 1.0 we did this implicitly. In 2.0 you can choose where in the course of a job your code should be checked out. - - checkout - # Prepare for artifact and test results collection equivalent to how it was done on 1.0. - # In many cases you can simplify this from what is generated here. - # 'See docs on artifact collection here https://circleci.com/docs/2.0/artifacts/' - - run: mkdir -p $CIRCLE_ARTIFACTS $CIRCLE_TEST_REPORTS - # This is based on your 1.0 configuration file or project settings - - run: - working_directory: ~/rocky/python-uncompyle6 - command: pip install --user virtualenv && pip install --user nose && pip install --user pep8 - # Dependencies - # This would typically go in either a build or a build-and-test job when using workflows - # Restore the dependency cache - - restore_cache: - keys: - - v2-dependencies-{{ .Branch }}- - # fallback to using the latest cache if no exact match is found - - v2-dependencies- + # Machine Setup + # If you break your build into multiple jobs with workflows, you will probably want to do the parts of this that are relevant in each + # The following `checkout` command checks out your code to your working directory. In 1.0 we did this implicitly. In 2.0 you can choose where in the course of a job your code should be checked out. + - checkout + # Prepare for artifact and test results collection equivalent to how it was done on 1.0. + # In many cases you can simplify this from what is generated here. 
+ # 'See docs on artifact collection here https://circleci.com/docs/2.0/artifacts/' + - run: mkdir -p $CIRCLE_ARTIFACTS $CIRCLE_TEST_REPORTS + # This is based on your 1.0 configuration file or project settings + - run: + working_directory: ~/rocky/python-uncompyle6 + command: pip install --user virtualenv && pip install --user nose && pip install + --user pep8 + # Dependencies + # This would typically go in either a build or a build-and-test job when using workflows + # Restore the dependency cache + - restore_cache: + keys: + - v2-dependencies-{{ .Branch }}- + # fallback to using the latest cache if no exact match is found + - v2-dependencies- - - run: - command: | # Use pip to install dependengcies - pip install --user --upgrade setuptools - # Until the next release - pip install git+https://github.com/rocky/python-xdis#egg=xdis - pip install --user -e . - pip install --user -r requirements-dev.txt + - run: + command: | # Use pip to install dependengcies + pip install --user --upgrade setuptools + # Until the next release + pip install git+https://github.com/rocky/python-xdis#egg=xdis + pip install --user -e . + pip install --user -r requirements-dev.txt - # Save dependency cache - - save_cache: - key: v2-dependencies-{{ .Branch }}-{{ epoch }} - paths: - # This is a broad list of cache paths to include many possible development environments - # You can probably delete some of these entries - - vendor/bundle - - ~/virtualenvs - - ~/.m2 - - ~/.ivy2 - - ~/.bundle - - ~/.cache/bower + # Save dependency cache + - save_cache: + key: v2-dependencies-{{ .Branch }}-{{ epoch }} + paths: + # This is a broad list of cache paths to include many possible development environments + # You can probably delete some of these entries + - vendor/bundle + - ~/virtualenvs + - ~/.m2 + - ~/.ivy2 + - ~/.bundle + - ~/.cache/bower - # Test - # This would typically be a build job when using workflows, possibly combined with build - # This is based on your 1.0 configuration file or project settings - - run: sudo python ./setup.py develop && make check-3.6 - - run: cd ./test/stdlib && bash ./runtests.sh 'test_[p-z]*.py' - # Teardown - # If you break your build into multiple jobs with workflows, you will probably want to do the parts of this that are relevant in each - # Save test results - - store_test_results: - path: /tmp/circleci-test-results - # Save artifacts - - store_artifacts: - path: /tmp/circleci-artifacts - - store_artifacts: - path: /tmp/circleci-test-results + # Test + # This would typically be a build job when using workflows, possibly combined with build + # This is based on your 1.0 configuration file or project settings + - run: sudo python ./setup.py develop && make check-3.6 + - run: cd ./test/stdlib && bash ./runtests.sh 'test_[p-z]*.py' + # Teardown + # If you break your build into multiple jobs with workflows, you will probably want to do the parts of this that are relevant in each + # Save test results + - store_test_results: + path: /tmp/circleci-test-results + # Save artifacts + - store_artifacts: + path: /tmp/circleci-artifacts + - store_artifacts: + path: /tmp/circleci-test-results + # The resource_class feature allows configuring CPU and RAM resources for each job. Different resource classes are available for different executors. 
https://circleci.com/docs/2.0/configuration-reference/#resourceclass + resource_class: large From 1c28bc1c824d054d8ffa6994264d5724b147ab6b Mon Sep 17 00:00:00 2001 From: rocky Date: Fri, 30 Jun 2023 16:38:20 -0400 Subject: [PATCH 03/12] Update Python version and exdis version --- .circleci/config.yml | 2 +- __pkginfo__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index ad879b5a..ca59be3b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -16,7 +16,7 @@ jobs: # To see the list of pre-built images that CircleCI provides for most common languages see # https://circleci.com/docs/2.0/circleci-images/ docker: - - image: circleci/python:3.6.9 + - image: circleci/python:3.8.17 steps: # Machine Setup # If you break your build into multiple jobs with workflows, you will probably want to do the parts of this that are relevant in each diff --git a/__pkginfo__.py b/__pkginfo__.py index 4b6d9fe6..723c15a3 100644 --- a/__pkginfo__.py +++ b/__pkginfo__.py @@ -75,7 +75,7 @@ entry_points = { ] } ftp_url = None -install_requires = ["spark-parser >= 1.8.9, < 1.9.0", "xdis >= 6.0.2, < 6.2.0"] +install_requires = ["spark-parser >= 1.8.9, < 1.9.0", "xdis >= 6.0.8, < 6.2.0"] license = "GPL3" mailing_list = "python-debugger@googlegroups.com" From 120b66b89ee40d9f427e5bf051c1468d9bf2946a Mon Sep 17 00:00:00 2001 From: rocky Date: Fri, 30 Jun 2023 16:41:32 -0400 Subject: [PATCH 04/12] Try Python 3.8 as base image --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index ca59be3b..794a1084 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -16,7 +16,7 @@ jobs: # To see the list of pre-built images that CircleCI provides for most common languages see # https://circleci.com/docs/2.0/circleci-images/ docker: - - image: circleci/python:3.8.17 + - image: circleci/python:3.8 steps: # Machine Setup # If you break your build into multiple jobs with workflows, you will probably want to do the parts of this that are relevant in each From dc286b91c8a894da7f1947b61fd67c30920ff466 Mon Sep 17 00:00:00 2001 From: rocky Date: Fri, 30 Jun 2023 16:46:03 -0400 Subject: [PATCH 05/12] pip woes --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 794a1084..f05ee99b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -42,9 +42,9 @@ jobs: - run: command: | # Use pip to install dependengcies - pip install --user --upgrade setuptools + sudo pip install --user --upgrade setuptools # Until the next release - pip install git+https://github.com/rocky/python-xdis#egg=xdis + sudo pip install git+https://github.com/rocky/python-xdis#egg=xdis pip install --user -e . pip install --user -r requirements-dev.txt From d6608712f12edeaad663b409823d1dcca475a2bb Mon Sep 17 00:00:00 2001 From: rocky Date: Fri, 30 Jun 2023 20:30:06 -0400 Subject: [PATCH 06/12] correct fn name on older 3.x cross decompile... 
Also black, lint, and isort some --- uncompyle6/scanners/scanner3.py | 135 +++++++++++++------------ uncompyle6/semantics/aligner.py | 147 +++++++++++++++++----------- uncompyle6/semantics/customize3.py | 82 +++++++++------- uncompyle6/semantics/customize36.py | 5 +- uncompyle6/semantics/fragments.py | 96 +++++++++--------- uncompyle6/semantics/n_actions.py | 4 +- uncompyle6/util.py | 6 ++ 7 files changed, 269 insertions(+), 206 deletions(-) diff --git a/uncompyle6/scanners/scanner3.py b/uncompyle6/scanners/scanner3.py index 30539a7d..69ab2749 100644 --- a/uncompyle6/scanners/scanner3.py +++ b/uncompyle6/scanners/scanner3.py @@ -39,10 +39,10 @@ from typing import Optional, Tuple from xdis import iscode, instruction_size, Instruction from xdis.bytecode import _get_const_info -from xdis.codetype import UnicodeForPython3 from uncompyle6.scanners.tok import Token from uncompyle6.scanner import parse_fn_counts_30_35 +from uncompyle6.util import get_code_name import xdis # Get all the opcodes into globals @@ -209,11 +209,18 @@ class Scanner3(Scanner): return def bound_collection_from_inst( - self, insts: list, next_tokens: list, inst: Instruction, t: Token, i: int, collection_type: str + self, + insts: list, + next_tokens: list, + inst: Instruction, + t: Token, + i: int, + collection_type: str, ) -> Optional[list]: """ - Try to a replace sequence of instruction that ends with a BUILD_xxx with a sequence that can - be parsed much faster, but inserting the token boundary at the beginning of the sequence. + Try to a replace sequence of instruction that ends with a + BUILD_xxx with a sequence that can be parsed much faster, but + inserting the token boundary at the beginning of the sequence. """ count = t.attr assert isinstance(count, int) @@ -291,10 +298,12 @@ class Scanner3(Scanner): return new_tokens def bound_map_from_inst( - self, insts: list, next_tokens: list, inst: Instruction, t: Token, i: int) -> Optional[list]: + self, insts: list, next_tokens: list, inst: Instruction, t: Token, i: int + ) -> Optional[list]: """ - Try to a sequence of instruction that ends with a BUILD_MAP into a sequence that can - be parsed much faster, but inserting the token boundary at the beginning of the sequence. + Try to a sequence of instruction that ends with a BUILD_MAP into + a sequence that can be parsed much faster, but inserting the + token boundary at the beginning of the sequence. """ count = t.attr assert isinstance(count, int) @@ -309,21 +318,18 @@ class Scanner3(Scanner): assert (count * 2) <= i for j in range(collection_start, i, 2): - if insts[j].opname not in ( - "LOAD_CONST", - ): + if insts[j].opname not in ("LOAD_CONST",): return None - if insts[j+1].opname not in ( - "LOAD_CONST", - ): + if insts[j + 1].opname not in ("LOAD_CONST",): return None collection_start = i - (2 * count) collection_enum = CONST_COLLECTIONS.index("CONST_MAP") - # If we get here, all instructions before tokens[i] are LOAD_CONST and we can replace - # add a boundary marker and change LOAD_CONST to something else - new_tokens = next_tokens[:-(2*count)] + # If we get here, all instructions before tokens[i] are LOAD_CONST and + # we can replace add a boundary marker and change LOAD_CONST to + # something else. 
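As background for the comment above: the bound_collection_from_inst / bound_map_from_inst helpers look for a literal built entirely from constants, whose bytecode is a run of LOAD_CONST instructions feeding a single BUILD_* opcode (the exact opcode varies with the Python version), and collapse that run into ADD_VALUE-style pseudo-tokens so the parser does not have to walk every element. A small, self-contained way to see such a run, using only the standard library:

# Disassemble a constant-only dict literal; the LOAD_CONST run feeding the
# final BUILD_MAP / BUILD_CONST_KEY_MAP is the pattern the rewrite targets.
import dis

dis.dis(compile("d = {'a': 1, 'b': 2, 'c': 3}", "<example>", "exec"))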
+ new_tokens = next_tokens[: -(2 * count)] start_offset = insts[collection_start].offset new_tokens.append( Token( @@ -353,10 +359,10 @@ class Scanner3(Scanner): new_tokens.append( Token( opname="ADD_VALUE", - attr=insts[j+1].argval, - pattr=insts[j+1].argrepr, - offset=insts[j+1].offset, - linestart=insts[j+1].starts_line, + attr=insts[j + 1].argval, + pattr=insts[j + 1].argrepr, + offset=insts[j + 1].offset, + linestart=insts[j + 1].starts_line, has_arg=True, has_extended_arg=False, opc=self.opc, @@ -376,8 +382,9 @@ class Scanner3(Scanner): ) return new_tokens - def ingest(self, co, classname=None, code_objects={}, show_asm=None - ) -> Tuple[list, dict]: + def ingest( + self, co, classname=None, code_objects={}, show_asm=None + ) -> Tuple[list, dict]: """ Create "tokens" the bytecode of an Python code object. Largely these are the opcode name, but in some cases that has been modified to make parsing @@ -387,14 +394,17 @@ class Scanner3(Scanner): Some transformations are made to assist the deparsing grammar: - various types of LOAD_CONST's are categorized in terms of what they load - COME_FROM instructions are added to assist parsing control structures - - operands with stack argument counts or flag masks are appended to the opcode name, e.g.: + - operands with stack argument counts or flag masks are appended to the + opcode name, e.g.: * BUILD_LIST, BUILD_SET - * MAKE_FUNCTION and FUNCTION_CALLS append the number of positional arguments + * MAKE_FUNCTION and FUNCTION_CALLS append the number of positional + arguments - EXTENDED_ARGS instructions are removed - Also, when we encounter certain tokens, we add them to a set which will cause custom - grammar rules. Specifically, variable arg tokens like MAKE_FUNCTION or BUILD_LIST - cause specific rules for the specific number of arguments they take. + Also, when we encounter certain tokens, we add them to a set + which will cause custom grammar rules. Specifically, variable + arg tokens like MAKE_FUNCTION or BUILD_LIST cause specific rules + for the specific number of arguments they take. 
""" if not show_asm: @@ -420,7 +430,6 @@ class Scanner3(Scanner): n = len(self.insts) for i, inst in enumerate(self.insts): - opname = inst.opname # We need to detect the difference between: # raise AssertionError @@ -437,12 +446,12 @@ class Scanner3(Scanner): prev_inst = self.insts[i - 1] assert_can_follow = ( prev_inst.opname in ("JUMP_IF_TRUE", "JUMP_IF_FALSE") - and i + 1 < n ) + and i + 1 < n + ) jump_if_inst = prev_inst else: assert_can_follow = ( - opname in ("POP_JUMP_IF_TRUE", "POP_JUMP_IF_FALSE") - and i + 1 < n + opname in ("POP_JUMP_IF_TRUE", "POP_JUMP_IF_FALSE") and i + 1 < n ) jump_if_inst = inst if assert_can_follow: @@ -452,7 +461,9 @@ class Scanner3(Scanner): and next_inst.argval == "AssertionError" and jump_if_inst.argval ): - raise_idx = self.offset2inst_index[self.prev_op[jump_if_inst.argval]] + raise_idx = self.offset2inst_index[ + self.prev_op[jump_if_inst.argval] + ] raise_inst = self.insts[raise_idx] if raise_inst.opname.startswith("RAISE_VARARGS"): self.load_asserts.add(next_inst.offset) @@ -468,22 +479,21 @@ class Scanner3(Scanner): new_tokens = [] for i, inst in enumerate(self.insts): - opname = inst.opname argval = inst.argval pattr = inst.argrepr t = Token( - opname=opname, - attr=argval, - pattr=pattr, - offset=inst.offset, - linestart=inst.starts_line, - op=inst.opcode, - has_arg=inst.has_arg, - has_extended_arg=inst.has_extended_arg, - opc=self.opc, - ) + opname=opname, + attr=argval, + pattr=pattr, + offset=inst.offset, + linestart=inst.starts_line, + op=inst.opcode, + has_arg=inst.has_arg, + has_extended_arg=inst.has_extended_arg, + opc=self.opc, + ) # things that smash new_tokens like BUILD_LIST have to come first. if opname in ( @@ -502,11 +512,13 @@ class Scanner3(Scanner): if try_tokens is not None: new_tokens = try_tokens continue - elif opname in ( - "BUILD_MAP", - ): + elif opname in ("BUILD_MAP",): try_tokens = self.bound_map_from_inst( - self.insts, new_tokens, inst, t, i, + self.insts, + new_tokens, + inst, + t, + i, ) if try_tokens is not None: new_tokens = try_tokens @@ -573,9 +585,7 @@ class Scanner3(Scanner): if op in self.opc.CONST_OPS: const = argval if iscode(const): - co_name = const.co_name - if isinstance(const.co_name, UnicodeForPython3): - co_name = const.co_name.value.decode("utf-8") + co_name = get_code_name(const) if co_name == "": assert opname == "LOAD_CONST" opname = "LOAD_LAMBDA" @@ -629,7 +639,7 @@ class Scanner3(Scanner): else: pos_args, name_pair_args, annotate_args = parse_fn_counts_30_35( inst.argval - ) + ) pattr = f"{pos_args} positional, {name_pair_args} keyword only, {annotate_args} annotated" @@ -715,11 +725,13 @@ class Scanner3(Scanner): and self.insts[i + 1].opname == "JUMP_FORWARD" ) - if (self.version[:2] == (3, 0) and self.insts[i + 1].opname == "JUMP_FORWARD" - and not is_continue): + if ( + self.version[:2] == (3, 0) + and self.insts[i + 1].opname == "JUMP_FORWARD" + and not is_continue + ): target_prev = self.offset2inst_index[self.prev_op[target]] - is_continue = ( - self.insts[target_prev].opname == "SETUP_LOOP") + is_continue = self.insts[target_prev].opname == "SETUP_LOOP" if is_continue or ( inst.offset in self.stmts @@ -736,7 +748,10 @@ class Scanner3(Scanner): # the "continue" is not on a new line. # There are other situations where we don't catch # CONTINUE as well. 
- if new_tokens[-1].kind == "JUMP_BACK" and new_tokens[-1].attr <= argval: + if ( + new_tokens[-1].kind == "JUMP_BACK" + and new_tokens[-1].attr <= argval + ): if new_tokens[-2].kind == "BREAK_LOOP": del new_tokens[-1] else: @@ -809,7 +824,10 @@ class Scanner3(Scanner): if inst.has_arg: label = self.fixed_jumps.get(offset) oparg = inst.arg - if self.version >= (3, 6) and self.code[offset] == self.opc.EXTENDED_ARG: + if ( + self.version >= (3, 6) + and self.code[offset] == self.opc.EXTENDED_ARG + ): j = xdis.next_offset(op, self.opc, offset) next_offset = xdis.next_offset(op, self.opc, j) else: @@ -1082,7 +1100,6 @@ class Scanner3(Scanner): and (target > offset) and pretarget.offset != offset ): - # FIXME: hack upon hack... # In some cases the pretarget can be a jump to the next instruction # and these aren't and/or's either. We limit to 3.5+ since we experienced there @@ -1104,7 +1121,6 @@ class Scanner3(Scanner): # Is it an "and" inside an "if" or "while" block if op == self.opc.POP_JUMP_IF_FALSE: - # Search for another POP_JUMP_IF_FALSE targetting the same op, # in current statement, starting from current offset, and filter # everything inside inner 'or' jumps and midline ifs @@ -1357,7 +1373,6 @@ class Scanner3(Scanner): self.fixed_jumps[offset] = rtarget self.not_continue.add(pre_rtarget) else: - # FIXME: this is very convoluted and based on rather hacky # empirical evidence. It should go a way when # we have better control-flow analysis diff --git a/uncompyle6/semantics/aligner.py b/uncompyle6/semantics/aligner.py index 1cc68a56..2db50bc2 100644 --- a/uncompyle6/semantics/aligner.py +++ b/uncompyle6/semantics/aligner.py @@ -1,4 +1,4 @@ -# Copyright (c) 2018, 2022 by Rocky Bernstein +# Copyright (c) 2018, 2022-2023 by Rocky Bernstein # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -14,41 +14,63 @@ # along with this program. If not, see . 
import sys -from uncompyle6.semantics.pysource import ( - SourceWalker, SourceWalkerError, find_globals, ASSIGN_DOC_STRING, RETURN_NONE) from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG -from uncompyle6 import IS_PYPY +from xdis import iscode + +from xdis.version_info import IS_PYPY +from uncompyle6.scanner import get_scanner +from uncompyle6.semantics.pysource import ( + ASSIGN_DOC_STRING, + RETURN_NONE, + SourceWalker, + SourceWalkerError, + find_globals_and_nonlocals +) +from uncompyle6.show import maybe_show_asm + +# + class AligningWalker(SourceWalker, object): - def __init__(self, version, out, scanner, showast=False, - debug_parser=PARSER_DEFAULT_DEBUG, - compile_mode='exec', is_pypy=False): - SourceWalker.__init__(self, version, out, scanner, showast, debug_parser, - compile_mode, is_pypy) + def __init__( + self, + version, + out, + scanner, + showast=False, + debug_parser=PARSER_DEFAULT_DEBUG, + compile_mode="exec", + is_pypy=False, + ): + SourceWalker.__init__( + self, version, out, scanner, showast, debug_parser, compile_mode, is_pypy + ) self.desired_line_number = 0 self.current_line_number = 0 def println(self, *data): - if data and not(len(data) == 1 and data[0] == ''): + if data and not (len(data) == 1 and data[0] == ""): self.write(*data) self.pending_newlines = max(self.pending_newlines, 1) def write(self, *data): if (len(data) == 1) and data[0] == self.indent: - diff = max(self.pending_newlines, - self.desired_line_number - self.current_line_number) - self.f.write('\n'*diff) + diff = max( + self.pending_newlines, + self.desired_line_number - self.current_line_number, + ) + self.f.write("\n" * diff) self.current_line_number += diff self.pending_newlines = 0 - if (len(data) == 0) or (len(data) == 1 and data[0] == ''): + if (len(data) == 0) or (len(data) == 1 and data[0] == ""): return - out = ''.join((str(j) for j in data)) + out = "".join((str(j) for j in data)) n = 0 for i in out: - if i == '\n': + if i == "\n": n += 1 if n == len(out): self.pending_newlines = max(self.pending_newlines, n) @@ -61,25 +83,27 @@ class AligningWalker(SourceWalker, object): break if self.pending_newlines > 0: - diff = max(self.pending_newlines, - self.desired_line_number - self.current_line_number) - self.f.write('\n'*diff) + diff = max( + self.pending_newlines, + self.desired_line_number - self.current_line_number, + ) + self.f.write("\n" * diff) self.current_line_number += diff self.pending_newlines = 0 for i in out[::-1]: - if i == '\n': + if i == "\n": self.pending_newlines += 1 else: break if self.pending_newlines: - out = out[:-self.pending_newlines] + out = out[: -self.pending_newlines] self.f.write(out) def default(self, node): mapping = self._get_mapping(node) - if hasattr(node, 'linestart'): + if hasattr(node, "linestart"): if node.linestart: self.desired_line_number = node.linestart table = mapping[0] @@ -90,25 +114,22 @@ class AligningWalker(SourceWalker, object): pass if key.type in table: - self.engine(table[key.type], node) + self.template_engine(table[key.type], node) self.prune() -from xdis import iscode -from uncompyle6.scanner import get_scanner -from uncompyle6.show import ( - maybe_show_asm, -) -# -DEFAULT_DEBUG_OPTS = { - 'asm': False, - 'tree': False, - 'grammar': False -} +DEFAULT_DEBUG_OPTS = {"asm": False, "tree": False, "grammar": False} -def code_deparse_align(co, out=sys.stderr, version=None, is_pypy=None, - debug_opts=DEFAULT_DEBUG_OPTS, - code_objects={}, compile_mode='exec'): + +def code_deparse_align( + co, + out=sys.stderr, + version=None, + 
is_pypy=None, + debug_opts=DEFAULT_DEBUG_OPTS, + code_objects={}, + compile_mode="exec", +): """ ingests and deparses a given code block 'co' """ @@ -120,61 +141,73 @@ def code_deparse_align(co, out=sys.stderr, version=None, is_pypy=None, if is_pypy is None: is_pypy = IS_PYPY - # store final output stream for case of error scanner = get_scanner(version, is_pypy=is_pypy) tokens, customize = scanner.ingest(co, code_objects=code_objects) - show_asm = debug_opts.get('asm', None) + show_asm = debug_opts.get("asm", None) maybe_show_asm(show_asm, tokens) debug_parser = dict(PARSER_DEFAULT_DEBUG) - show_grammar = debug_opts.get('grammar', None) - show_grammar = debug_opts.get('grammar', None) + show_grammar = debug_opts.get("grammar", None) + show_grammar = debug_opts.get("grammar", None) if show_grammar: - debug_parser['reduce'] = show_grammar - debug_parser['errorstack'] = True + debug_parser["reduce"] = show_grammar + debug_parser["errorstack"] = True # Build a parse tree from tokenized and massaged disassembly. - show_ast = debug_opts.get('ast', None) - deparsed = AligningWalker(version, scanner, out, showast=show_ast, - debug_parser=debug_parser, compile_mode=compile_mode, - is_pypy = is_pypy) + show_ast = debug_opts.get("ast", None) + deparsed = AligningWalker( + version, + scanner, + out, + showast=show_ast, + debug_parser=debug_parser, + compile_mode=compile_mode, + is_pypy=is_pypy, + ) - is_top_level_module = co.co_name == '' - deparsed.ast = deparsed.build_ast(tokens, customize, co, is_top_level_module=is_top_level_module) + is_top_level_module = co.co_name == "" + deparsed.ast = deparsed.build_ast( + tokens, customize, co, is_top_level_module=is_top_level_module + ) - assert deparsed.ast == 'stmts', 'Should have parsed grammar start' + assert deparsed.ast == "stmts", "Should have parsed grammar start" - del tokens # save memory + del tokens # save memory - deparsed.mod_globs = find_globals(deparsed.ast, set()) + (deparsed.mod_globs, _) = find_globals_and_nonlocals( + deparsed.ast, set(), set(), co, version + ) # convert leading '__doc__ = "..." into doc string try: if deparsed.ast[0][0] == ASSIGN_DOC_STRING(co.co_consts[0]): - deparsed.print_docstring('', co.co_consts[0]) + deparsed.print_docstring("", co.co_consts[0]) del deparsed.ast[0] if deparsed.ast[-1] == RETURN_NONE: - deparsed.ast.pop() # remove last node + deparsed.ast.pop() # remove last node # todo: if empty, add 'pass' - except: + except Exception: pass # What we've been waiting for: Generate Python source from the parse tree! 
deparsed.gen_source(deparsed.ast, co.co_name, customize) for g in sorted(deparsed.mod_globs): - deparsed.write('# global %s ## Warning: Unused global\n' % g) + deparsed.write("# global %s ## Warning: Unused global\n" % g) if deparsed.ERROR: raise SourceWalkerError("Deparsing stopped due to parse error") return deparsed -if __name__ == '__main__': + +if __name__ == "__main__": + def deparse_test(co): "This is a docstring" deparsed = code_deparse_align(co) print(deparsed.text) return + deparse_test(deparse_test.__code__) diff --git a/uncompyle6/semantics/customize3.py b/uncompyle6/semantics/customize3.py index 2f388893..798ba2f3 100644 --- a/uncompyle6/semantics/customize3.py +++ b/uncompyle6/semantics/customize3.py @@ -1,4 +1,4 @@ -# Copyright (c) 2018-2021 by Rocky Bernstein +# Copyright (c) 2018-2021, 2023 by Rocky Bernstein # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -13,23 +13,20 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -"""Isolate Python 3 version-specific semantic actions here. +""" +Isolate Python 3 version-specific semantic actions here. """ +from xdis import iscode + from uncompyle6.semantics.consts import TABLE_DIRECT - -from xdis import co_flags_is_async, iscode -from uncompyle6.scanner import Code -from uncompyle6.semantics.helper import ( - find_code_node, - gen_function_parens_adjust, -) - -from uncompyle6.semantics.make_function3 import make_function3_annotate from uncompyle6.semantics.customize35 import customize_for_version35 from uncompyle6.semantics.customize36 import customize_for_version36 from uncompyle6.semantics.customize37 import customize_for_version37 from uncompyle6.semantics.customize38 import customize_for_version38 +from uncompyle6.semantics.helper import find_code_node, gen_function_parens_adjust +from uncompyle6.semantics.make_function3 import make_function3_annotate +from uncompyle6.util import get_code_name def customize_for_version3(self, version): @@ -51,7 +48,7 @@ def customize_for_version3(self, version): "import_cont": (", %c", 2), "kwarg": ("%[0]{attr}=%c", 1), "raise_stmt2": ("%|raise %c from %c\n", 0, 1), - "tf_tryelsestmtl3": ( '%c%-%c%|else:\n%+%c', 1, 3, 5 ), + "tf_tryelsestmtl3": ("%c%-%c%|else:\n%+%c", 1, 3, 5), "store_locals": ("%|# inspect.currentframe().f_locals = __locals__\n",), "with": ("%|with %c:\n%+%c%-", 0, 3), "withasstmt": ("%|with %c as (%c):\n%+%c%-", 0, 2, 3), @@ -67,22 +64,22 @@ def customize_for_version3(self, version): # are different. See test_fileio.py for an example that shows this. 
def tryfinallystmt(node): suite_stmts = node[1][0] - if len(suite_stmts) == 1 and suite_stmts[0] == 'stmt': + if len(suite_stmts) == 1 and suite_stmts[0] == "stmt": stmt = suite_stmts[0] try_something = stmt[0] if try_something == "try_except": try_something.kind = "tf_try_except" if try_something.kind.startswith("tryelsestmt"): if try_something == "tryelsestmtl3": - try_something.kind = 'tf_tryelsestmtl3' + try_something.kind = "tf_tryelsestmtl3" else: - try_something.kind = 'tf_tryelsestmt' + try_something.kind = "tf_tryelsestmt" self.default(node) + self.n_tryfinallystmt = tryfinallystmt def n_classdef3(node): - """Handle "classdef" nonterminal for 3.0 >= version 3.0 < 3.6 - """ + """Handle "classdef" nonterminal for 3.0 >= version 3.0 < 3.6""" assert (3, 0) <= self.version < (3, 6) @@ -191,18 +188,25 @@ def customize_for_version3(self, version): # the iteration variable. These rules we can ignore # since we pick up the iteration variable some other way and # we definitely don't include in the source _[dd]. - TABLE_DIRECT.update({ - "ifstmt30": ( "%|if %c:\n%+%c%-", - (0, "testfalse_then"), - (1, "_ifstmts_jump30") ), - "ifnotstmt30": ( "%|if not %c:\n%+%c%-", - (0, "testtrue_then"), - (1, "_ifstmts_jump30") ), - "try_except30": ( "%|try:\n%+%c%-%c\n\n", - (1, "suite_stmts_opt"), - (4, "except_handler") ), - - }) + TABLE_DIRECT.update( + { + "ifstmt30": ( + "%|if %c:\n%+%c%-", + (0, "testfalse_then"), + (1, "_ifstmts_jump30"), + ), + "ifnotstmt30": ( + "%|if not %c:\n%+%c%-", + (0, "testtrue_then"), + (1, "_ifstmts_jump30"), + ), + "try_except30": ( + "%|try:\n%+%c%-%c\n\n", + (1, "suite_stmts_opt"), + (4, "except_handler"), + ), + } + ) def n_comp_iter(node): if node[0] == "expr": @@ -235,7 +239,6 @@ def customize_for_version3(self, version): if (3, 2) <= version <= (3, 4): def n_call(node): - mapping = self._get_mapping(node) key = node for i in mapping[1:]: @@ -289,24 +292,23 @@ def customize_for_version3(self, version): self.n_call = n_call def n_mkfunc_annotate(node): - # Handling EXTENDED_ARG before MAKE_FUNCTION ... i = -1 if node[-2] == "EXTENDED_ARG" else 0 if self.version < (3, 3): - code = node[-2 + i] + code_node = node[-2 + i] elif self.version >= (3, 3) or node[-2] == "kwargs": # LOAD_CONST code object .. # LOAD_CONST 'x0' if >= 3.3 # EXTENDED_ARG # MAKE_FUNCTION .. - code = node[-3 + i] + code_node = node[-3 + i] elif node[-3] == "expr": - code = node[-3][0] + code_node = node[-3][0] else: # LOAD_CONST code object .. # MAKE_FUNCTION .. - code = node[-3] + code_node = node[-3] self.indent_more() for annotate_last in range(len(node) - 1, -1, -1): @@ -318,11 +320,15 @@ def customize_for_version3(self, version): # But when derived from funcdefdeco it hasn't Would like a better # way to distinquish. 
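For context on n_mkfunc_annotate above: it deparses the MAKE_FUNCTION variant that carries annotations, i.e. source of this shape:

# A def with parameter and return annotations (and a default) is the case
# n_mkfunc_annotate / make_function3_annotate handle.
def scale(x: float, factor: float = 2.0) -> float:
    return x * factor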
if self.f.getvalue()[-4:] == "def ": - self.write(code.attr.co_name) + self.write(get_code_name(code_node.attr)) # FIXME: handle and pass full annotate args make_function3_annotate( - self, node, is_lambda=False, code_node=code, annotate_last=annotate_last + self, + node, + is_lambda=False, + code_node=code_node, + annotate_last=annotate_last, ) if len(self.param_stack) > 1: @@ -339,7 +345,7 @@ def customize_for_version3(self, version): "tryelsestmtl3": ( "%|try:\n%+%c%-%c%|else:\n%+%c%-", (1, "suite_stmts_opt"), - 3, # "except_handler_else" or "except_handler" + 3, # "except_handler_else" or "except_handler" (5, "else_suitel"), ), "LOAD_CLASSDEREF": ("%{pattr}",), diff --git a/uncompyle6/semantics/customize36.py b/uncompyle6/semantics/customize36.py index 30bacc01..9b2c6ae9 100644 --- a/uncompyle6/semantics/customize36.py +++ b/uncompyle6/semantics/customize36.py @@ -1,4 +1,4 @@ -# Copyright (c) 2019-2022 by Rocky Bernstein +# Copyright (c) 2019-2023 by Rocky Bernstein # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -25,6 +25,7 @@ from uncompyle6.semantics.consts import ( TABLE_DIRECT, TABLE_R, ) +from uncompyle6.util import get_code_name def escape_format(s): @@ -190,7 +191,7 @@ def customize_for_version36(self, version): code_node = build_class[1][1] else: code_node = build_class[1][0] - class_name = code_node.attr.co_name + class_name = get_code_name(code_node.attr) assert "mkfunc" == build_class[1] mkfunc = build_class[1] diff --git a/uncompyle6/semantics/fragments.py b/uncompyle6/semantics/fragments.py index 1c25bb71..bb245a06 100644 --- a/uncompyle6/semantics/fragments.py +++ b/uncompyle6/semantics/fragments.py @@ -63,38 +63,33 @@ The node position 0 will be associated with "import". 
# FIXME: DRY code with pysource -from __future__ import print_function - import re - -from uncompyle6.semantics import pysource -from uncompyle6 import parser -from uncompyle6.scanner import Token, Code, get_scanner -import uncompyle6.parser as python_parser -from uncompyle6.semantics.check_ast import checker - -from uncompyle6.show import maybe_show_asm, maybe_show_tree - -from uncompyle6.parsers.treenode import SyntaxTree - -from uncompyle6.semantics.pysource import ParserError, StringIO -from xdis import iscode -from xdis.version_info import IS_PYPY, PYTHON_VERSION_TRIPLE - -from uncompyle6.semantics.consts import ( - INDENT_PER_LEVEL, - NONE, - PRECEDENCE, - TABLE_DIRECT, - escape, - MAP, - PASS, -) +import sys +from collections import namedtuple +from typing import Optional from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG from spark_parser.ast import GenericASTTraversalPruningException +from xdis import iscode +from xdis.version_info import IS_PYPY, PYTHON_VERSION_TRIPLE -from collections import namedtuple +import uncompyle6.parser as python_parser +from uncompyle6 import parser +from uncompyle6.parsers.treenode import SyntaxTree +from uncompyle6.scanner import Code, Token, get_scanner +from uncompyle6.semantics import pysource +from uncompyle6.semantics.check_ast import checker +from uncompyle6.semantics.consts import ( + INDENT_PER_LEVEL, + MAP, + NONE, + PASS, + PRECEDENCE, + TABLE_DIRECT, + escape, +) +from uncompyle6.semantics.pysource import ParserError, StringIO +from uncompyle6.show import maybe_show_asm, maybe_show_tree NodeInfo = namedtuple("NodeInfo", "node start finish") ExtractInfo = namedtuple( @@ -149,7 +144,6 @@ TABLE_DIRECT_FRAGMENT = { class FragmentsWalker(pysource.SourceWalker, object): - MAP_DIRECT_FRAGMENT = () stacked_params = ("f", "indent", "is_lambda", "_globals") @@ -346,7 +340,6 @@ class FragmentsWalker(pysource.SourceWalker, object): self.prune() # stop recursing def n_return_if_stmt(self, node): - start = len(self.f.getvalue()) + len(self.indent) if self.params["is_lambda"]: node[0].parent = node @@ -667,7 +660,7 @@ class FragmentsWalker(pysource.SourceWalker, object): assert n == "comp_iter" # Find the comprehension body. It is the inner-most # node that is not list_.. . - while n == "comp_iter": # list_iter + while n == "comp_iter": # list_iter n = n[0] # recurse one step if n == "comp_for": if n[0] == "SETUP_LOOP": @@ -1123,8 +1116,9 @@ class FragmentsWalker(pysource.SourceWalker, object): n_classdefdeco2 = n_classdef - def gen_source(self, ast, name, customize, is_lambda=False, returnNone=False, - debug_opts=None): + def gen_source( + self, ast, name, customize, is_lambda=False, returnNone=False, debug_opts=None + ): """convert parse tree to Python source code""" rn = self.return_none @@ -1150,7 +1144,6 @@ class FragmentsWalker(pysource.SourceWalker, object): noneInNames=False, is_top_level_module=False, ): - # FIXME: DRY with pysource.py # assert isinstance(tokens[0], Token) @@ -1463,7 +1456,6 @@ class FragmentsWalker(pysource.SourceWalker, object): self.set_pos_info(node, start, len(self.f.getvalue())) def print_super_classes3(self, node): - # FIXME: wrap superclasses onto a node # as a custom rule start = len(self.f.getvalue()) @@ -1482,7 +1474,7 @@ class FragmentsWalker(pysource.SourceWalker, object): # FIXME: this doesn't handle positional and keyword args # properly. Need to do something more like that below # in the non-PYPY 3.6 case. 
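The FIXME above concerns keyword arguments in a class header, which print_super_classes3 has to render after the positional bases; the construct looks like this:

# Keyword arguments in the class statement (metaclass=... here) are what the
# superclass-printing code must emit alongside the positional bases.
class Meta(type):
    pass


class Widget(dict, metaclass=Meta):
    pass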
- self.template_engine(('(%[0]{attr}=%c)', 1), node[n-1]) + self.template_engine(("(%[0]{attr}=%c)", 1), node[n - 1]) return else: kwargs = node[n - 1].attr @@ -1846,9 +1838,13 @@ class FragmentsWalker(pysource.SourceWalker, object): index = entry[arg] if isinstance(index, tuple): - assert node[index[0]] == index[1], ( - "at %s[%d], expected %s node; got %s" - % (node.kind, arg, node[index[0]].kind, index[1]) + assert ( + node[index[0]] == index[1] + ), "at %s[%d], expected %s node; got %s" % ( + node.kind, + arg, + node[index[0]].kind, + index[1], ) index = index[0] assert isinstance( @@ -1869,9 +1865,13 @@ class FragmentsWalker(pysource.SourceWalker, object): assert isinstance(tup, tuple) if len(tup) == 3: (index, nonterm_name, self.prec) = tup - assert node[index] == nonterm_name, ( - "at %s[%d], expected '%s' node; got '%s'" - % (node.kind, arg, nonterm_name, node[index].kind) + assert ( + node[index] == nonterm_name + ), "at %s[%d], expected '%s' node; got '%s'" % ( + node.kind, + arg, + nonterm_name, + node[index].kind, ) else: assert len(tup) == 2 @@ -1984,6 +1984,7 @@ class FragmentsWalker(pysource.SourceWalker, object): # DEFAULT_DEBUG_OPTS = {"asm": False, "tree": False, "grammar": False} + # This interface is deprecated def deparse_code( version, @@ -2074,7 +2075,9 @@ def code_deparse( ) is_top_level_module = co.co_name == "" - deparsed.ast = deparsed.build_ast(tokens, customize, co, is_top_level_module=is_top_level_module) + deparsed.ast = deparsed.build_ast( + tokens, customize, co, is_top_level_module=is_top_level_module + ) assert deparsed.ast == "stmts", "Should have parsed grammar start" @@ -2084,7 +2087,7 @@ def code_deparse( # convert leading '__doc__ = "..." into doc string assert deparsed.ast == "stmts" - (deparsed.mod_globs, nonlocals) = pysource.find_globals_and_nonlocals( + (deparsed.mod_globs, _) = pysource.find_globals_and_nonlocals( deparsed.ast, set(), set(), co, version ) @@ -2135,7 +2138,7 @@ def code_deparse_around_offset( offset, co, out=StringIO(), - version=None, + version=Optional[tuple], is_pypy=None, debug_opts=DEFAULT_DEBUG_OPTS, ): @@ -2147,7 +2150,7 @@ def code_deparse_around_offset( assert iscode(co) if version is None: - version = sysinfo2float() + version = sys.version_info[:3] if is_pypy is None: is_pypy = IS_PYPY @@ -2200,8 +2203,7 @@ def deparsed_find(tup, deparsed, code): """Return a NodeInfo nametuple for a fragment-deparsed `deparsed` at `tup`. `tup` is a name and offset tuple, `deparsed` is a fragment object - and `code` is instruction bytecode. 
-""" + and `code` is instruction bytecode.""" nodeInfo = None name, last_i = tup if not hasattr(deparsed, "offsets"): diff --git a/uncompyle6/semantics/n_actions.py b/uncompyle6/semantics/n_actions.py index d61cf517..e22588f3 100644 --- a/uncompyle6/semantics/n_actions.py +++ b/uncompyle6/semantics/n_actions.py @@ -25,7 +25,7 @@ from uncompyle6.semantics.consts import ( from uncompyle6.parsers.treenode import SyntaxTree from uncompyle6.scanners.tok import Token -from uncompyle6.util import better_repr +from uncompyle6.util import better_repr, get_code_name from uncompyle6.semantics.helper import ( find_code_node, @@ -1040,7 +1040,7 @@ class NonterminalActions: def n_mkfunc(self, node): code_node = find_code_node(node, -2) code = code_node.attr - self.write(code.co_name) + self.write(get_code_name(code)) self.indent_more() self.make_function(node, is_lambda=False, code_node=code_node) diff --git a/uncompyle6/util.py b/uncompyle6/util.py index 888ed368..79b4fbe6 100644 --- a/uncompyle6/util.py +++ b/uncompyle6/util.py @@ -3,8 +3,14 @@ # More could be done here though. from math import copysign +from xdis.codetype import UnicodeForPython3 from xdis.version_info import PYTHON_VERSION_TRIPLE +def get_code_name(code) -> str: + code_name = code.co_name + if isinstance(code_name, UnicodeForPython3): + return code_name.value.decode("utf-8") + return code_name def is_negative_zero(n): """Returns true if n is -0.0""" From 99f054ea9df58c422a606a4c7d2bae9a021f3669 Mon Sep 17 00:00:00 2001 From: rocky Date: Sat, 1 Jul 2023 23:22:57 -0400 Subject: [PATCH 07/12] Forgot to include 3.3 in recent generator fix --- uncompyle6/semantics/n_actions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/uncompyle6/semantics/n_actions.py b/uncompyle6/semantics/n_actions.py index e22588f3..0747202b 100644 --- a/uncompyle6/semantics/n_actions.py +++ b/uncompyle6/semantics/n_actions.py @@ -701,7 +701,7 @@ class NonterminalActions: self.write("(") iter_index = 3 if self.version > (3, 2): - if self.version >= (3, 4): + if self.version >= (3, 3): if node[0].kind in ( "load_closure", "load_genexpr", From 227f494fa871925ab890efa9a824858efbcff0a7 Mon Sep 17 00:00:00 2001 From: rocky Date: Tue, 4 Jul 2023 07:13:54 -0400 Subject: [PATCH 08/12] Double -a option show asm before tokenization --- uncompyle6/bin/uncompile.py | 19 ++++++++++++------- uncompyle6/main.py | 11 ++++------- uncompyle6/scanners/scanner2.py | 14 +++++++++++--- uncompyle6/scanners/scanner3.py | 13 +++++++++++-- uncompyle6/scanners/scanner37base.py | 16 ++++++++++++---- 5 files changed, 50 insertions(+), 23 deletions(-) diff --git a/uncompyle6/bin/uncompile.py b/uncompyle6/bin/uncompile.py index 28f571f8..fc7e7556 100755 --- a/uncompyle6/bin/uncompile.py +++ b/uncompyle6/bin/uncompile.py @@ -11,7 +11,9 @@ import os import sys import time -from xdis.version_info import version_tuple_to_str +from uncompyle6 import verify +from uncompyle6.main import main, status_msg +from uncompyle6.version import __version__ program = "uncompyle6" @@ -69,10 +71,6 @@ Extensions of generated files: program = "uncompyle6" -from uncompyle6 import verify -from uncompyle6.main import main, status_msg -from uncompyle6.version import __version__ - def usage(): print(__doc__) @@ -102,7 +100,9 @@ def main_bin(): print("%s: %s" % (os.path.basename(sys.argv[0]), e), file=sys.stderr) sys.exit(-1) - options = {} + options = { + "showasm": None + } for opt, val in opts: if opt in ("-h", "--help"): print(__doc__) @@ -121,7 +121,10 @@ def main_bin(): elif opt == "--linemaps": 
options["do_linemaps"] = True elif opt in ("--asm", "-a"): - options["showasm"] = "after" + if options["showasm"] == None: + options["showasm"] = "after" + else: + options["showasm"] = "both" options["do_verify"] = None elif opt in ("--tree", "-t"): if "showast" not in options: @@ -227,6 +230,8 @@ def main_bin(): rqueue = Queue(numproc) + tot_files = okay_files = failed_files = verify_failed_files = 0 + def process_func(): try: (tot_files, okay_files, failed_files, verify_failed_files) = ( diff --git a/uncompyle6/main.py b/uncompyle6/main.py index 72b86010..669643e6 100644 --- a/uncompyle6/main.py +++ b/uncompyle6/main.py @@ -17,7 +17,7 @@ import datetime import os import py_compile import sys -from typing import Any, Tuple +from typing import Any, Optional, Tuple from xdis import iscode from xdis.load import load_module @@ -50,7 +50,7 @@ def decompile( co, bytecode_version: Tuple[int] = PYTHON_VERSION_TRIPLE, out=sys.stdout, - showasm=None, + showasm: Optional[str]=None, showast={}, timestamp=None, showgrammar=False, @@ -107,14 +107,11 @@ def decompile( if source_size: write("# Size of source mod 2**32: %d bytes" % source_size) - # maybe a second -a will do before as well - asm = "after" if showasm else None - grammar = dict(PARSER_DEFAULT_DEBUG) if showgrammar: grammar["reduce"] = True - debug_opts = {"asm": asm, "tree": showast, "grammar": grammar} + debug_opts = {"asm": showasm, "tree": showast, "grammar": grammar} try: if mapstream: @@ -244,7 +241,7 @@ def main( compiled_files: list, source_files: list, outfile=None, - showasm=None, + showasm: Optional[str] = None, showast={}, do_verify=False, showgrammar=False, diff --git a/uncompyle6/scanners/scanner2.py b/uncompyle6/scanners/scanner2.py index 8b6c3c23..77d7fcae 100644 --- a/uncompyle6/scanners/scanner2.py +++ b/uncompyle6/scanners/scanner2.py @@ -205,10 +205,17 @@ class Scanner2(Scanner): bytecode = self.build_instructions(co) - # show_asm = 'after' if show_asm in ("both", "before"): - for instr in bytecode.get_instructions(co): - print(instr.disassemble()) + print("\n# ---- before tokenization:") + bytecode.disassemble_bytes( + co.co_code, + varnames=co.co_varnames, + names=co.co_names, + constants=co.co_consts, + cells=bytecode._cell_names, + linestarts=bytecode._linestarts, + asm_format="extended", + ) # list of tokens/instructions new_tokens = [] @@ -483,6 +490,7 @@ class Scanner2(Scanner): pass if show_asm in ("both", "after"): + print("\n# ---- after tokenization:") for t in new_tokens: print(t.format(line_prefix="")) print() diff --git a/uncompyle6/scanners/scanner3.py b/uncompyle6/scanners/scanner3.py index 69ab2749..62c47d74 100644 --- a/uncompyle6/scanners/scanner3.py +++ b/uncompyle6/scanners/scanner3.py @@ -414,8 +414,16 @@ class Scanner3(Scanner): # show_asm = 'both' if show_asm in ("both", "before"): - for instr in bytecode.get_instructions(co): - print(instr.disassemble()) + print("\n# ---- before tokenization:") + bytecode.disassemble_bytes( + co.co_code, + varnames=co.co_varnames, + names=co.co_names, + constants=co.co_consts, + cells=bytecode._cell_names, + linestarts=bytecode._linestarts, + asm_format="extended", + ) # "customize" is in the process of going away here customize = {} @@ -777,6 +785,7 @@ class Scanner3(Scanner): pass if show_asm in ("both", "after"): + print("\n# ---- after tokenization:") for t in new_tokens: print(t.format(line_prefix="")) print() diff --git a/uncompyle6/scanners/scanner37base.py b/uncompyle6/scanners/scanner37base.py index 1c6c499b..bc27e41a 100644 --- 
a/uncompyle6/scanners/scanner37base.py +++ b/uncompyle6/scanners/scanner37base.py @@ -1,4 +1,4 @@ -# Copyright (c) 2015-2020, 2022 by Rocky Bernstein +# Copyright (c) 2015-2020, 2022-2023 by Rocky Bernstein # Copyright (c) 2005 by Dan Pascu # Copyright (c) 2000-2002 by hartmut Goebel # @@ -219,10 +219,17 @@ class Scanner37Base(Scanner): bytecode = self.build_instructions(co) - # show_asm = 'both' if show_asm in ("both", "before"): - for instr in bytecode.get_instructions(co): - print(instr.disassemble(self.opc)) + print("\n# ---- before tokenization:") + bytecode.disassemble_bytes( + co.co_code, + varnames=co.co_varnames, + names=co.co_names, + constants=co.co_consts, + cells=bytecode._cell_names, + linestarts=bytecode._linestarts, + asm_format="extended", + ) # "customize" is in the process of going away here customize = {} @@ -525,6 +532,7 @@ class Scanner37Base(Scanner): pass if show_asm in ("both", "after"): + print("\n# ---- after tokenization:") for t in tokens: print(t.format(line_prefix="")) print() From 3a8f3e550da065bfa906347b9f765b42e1269b4e Mon Sep 17 00:00:00 2001 From: rocky Date: Wed, 5 Jul 2023 07:59:04 -0400 Subject: [PATCH 09/12] Include xdis version in bug report --- .github/ISSUE_TEMPLATE/bug-report.md | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/ISSUE_TEMPLATE/bug-report.md b/.github/ISSUE_TEMPLATE/bug-report.md index c1aaec72..2828f148 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.md +++ b/.github/ISSUE_TEMPLATE/bug-report.md @@ -113,6 +113,7 @@ If this is too long, then try narrowing the problem to something short. Please modify for your setup - Uncompyle6 version: output from `uncompyle6 --version` or `pip show uncompyle6` +- xdis version: output from `pydisasm --version` or or `pip show xdis` - Python version for the version of Python the byte-compiled the file: `python -c "import sys; print(sys.version)"` where `python` is the correct CPython or PyPy binary. - OS and Version: [e.g. Ubuntu bionic] From ea76de02bd4dc85d414422a1fdeeeab19dbdcc46 Mon Sep 17 00:00:00 2001 From: rocky Date: Fri, 7 Jul 2023 10:13:14 -0400 Subject: [PATCH 10/12] Tweak bug-report --- .github/ISSUE_TEMPLATE/bug-report.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug-report.md b/.github/ISSUE_TEMPLATE/bug-report.md index 2828f148..2cd81f4e 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.md +++ b/.github/ISSUE_TEMPLATE/bug-report.md @@ -17,9 +17,9 @@ are trying to solve that involve the use of uncompyle6 along the way, although I may be more tolerant of this if you sponsor the project. Bugs are also not for general or novice kind help on how to install -this Python program in your environment in the way you would like to -have it set up, or how to interpret a Python traceback e.g. that winds -up saying Python X.Y.Z is not supported. +this Python program and its dependencies in your environment, or in +the way you would like to have it set up, or how to interpret a Python +traceback e.g. that winds up saying Python X.Y.Z is not supported. 
For these kinds of things, you will save yourself time by asking instead on forums like StackOverflow that are geared to helping people From 843e3585e2adfb753233ef1f7dfee87142503c03 Mon Sep 17 00:00:00 2001 From: rocky Date: Sat, 29 Jul 2023 12:01:14 -0400 Subject: [PATCH 11/12] chained-compare1 -> chained-compare-middle --- uncompyle6/parser.py | 2 +- uncompyle6/parsers/parse26.py | 25 ++++++++-------- uncompyle6/parsers/parse27.py | 19 +++++++----- uncompyle6/parsers/parse3.py | 35 +++++++++++----------- uncompyle6/parsers/parse30.py | 24 ++++++++------- uncompyle6/parsers/parse37.py | 46 ++++++++++++++++------------- uncompyle6/semantics/consts.py | 2 +- uncompyle6/semantics/customize37.py | 16 +++++----- 8 files changed, 90 insertions(+), 79 deletions(-) diff --git a/uncompyle6/parser.py b/uncompyle6/parser.py index 97dc5f9d..dbeaf73f 100644 --- a/uncompyle6/parser.py +++ b/uncompyle6/parser.py @@ -598,7 +598,7 @@ class PythonParser(GenericASTBuilder): compare_single ::= expr expr COMPARE_OP # A compare_chained is two comparisions like x <= y <= z - compare_chained ::= expr compare_chained1 ROT_TWO POP_TOP _come_froms + compare_chained ::= expr compared_chained_middle ROT_TWO POP_TOP _come_froms compare_chained2 ::= expr COMPARE_OP JUMP_FORWARD # Non-null kvlist items are broken out in the indiviual grammars diff --git a/uncompyle6/parsers/parse26.py b/uncompyle6/parsers/parse26.py index 65b59740..1c90b194 100644 --- a/uncompyle6/parsers/parse26.py +++ b/uncompyle6/parsers/parse26.py @@ -1,4 +1,4 @@ -# Copyright (c) 2017-2022 Rocky Bernstein +# Copyright (c) 2017-2023 Rocky Bernstein """ spark grammar differences over Python2 for Python 2.6. """ @@ -307,17 +307,18 @@ class Python26Parser(Python2Parser): and ::= expr JUMP_IF_FALSE POP_TOP expr JUMP_IF_FALSE POP_TOP - # compare_chained is like x <= y <= z - compare_chained ::= expr compare_chained1 ROT_TWO COME_FROM POP_TOP _come_froms - compare_chained1 ::= expr DUP_TOP ROT_THREE COMPARE_OP - jmp_false compare_chained1 _come_froms - compare_chained1 ::= expr DUP_TOP ROT_THREE COMPARE_OP - jmp_false compare_chained2 _come_froms + # compare_chained is x <= y <= z + compare_chained ::= expr compared_chained_middle ROT_TWO + COME_FROM POP_TOP _come_froms + compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP + jmp_false compared_chained_middle _come_froms + compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP + jmp_false compare_chained2 _come_froms - compare_chained1 ::= expr DUP_TOP ROT_THREE COMPARE_OP - jmp_false_then compare_chained1 _come_froms - compare_chained1 ::= expr DUP_TOP ROT_THREE COMPARE_OP - jmp_false_then compare_chained2 _come_froms + compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP + jmp_false_then compared_chained_middle _come_froms + compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP + jmp_false_then compare_chained2 _come_froms compare_chained2 ::= expr COMPARE_OP return_expr_lambda compare_chained2 ::= expr COMPARE_OP RETURN_END_IF_LAMBDA @@ -565,7 +566,7 @@ if __name__ == "__main__": remain_tokens = set(tokens) - opcode_set import re - remain_tokens = set([re.sub("_\d+$", "", t) for t in remain_tokens]) + remain_tokens = set([re.sub(r"_\d+$", "", t) for t in remain_tokens]) remain_tokens = set([re.sub("_CONT$", "", t) for t in remain_tokens]) remain_tokens = set(remain_tokens) - opcode_set print(remain_tokens) diff --git a/uncompyle6/parsers/parse27.py b/uncompyle6/parsers/parse27.py index 81223340..50e1503a 100644 --- a/uncompyle6/parsers/parse27.py +++ 
b/uncompyle6/parsers/parse27.py @@ -1,4 +1,4 @@ -# Copyright (c) 2016-2020, 2022 Rocky Bernstein +# Copyright (c) 2016-2020, 2023 Rocky Bernstein # Copyright (c) 2005 by Dan Pascu # Copyright (c) 2000-2002 by hartmut Goebel @@ -115,11 +115,12 @@ class Python27Parser(Python2Parser): or ::= expr_jitop expr COME_FROM and ::= expr JUMP_IF_FALSE_OR_POP expr COME_FROM - # compare_chained{1,2} is used exclusively in chained_compare - compare_chained1 ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP - compare_chained1 COME_FROM - compare_chained1 ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP - compare_chained2 COME_FROM + # compare_chained{middle,2} is used exclusively in chained_compare + compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP + JUMP_IF_FALSE_OR_POP compared_chained_middle + COME_FROM + compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP + JUMP_IF_FALSE_OR_POP compare_chained2 COME_FROM return_lambda ::= RETURN_VALUE return_lambda ::= RETURN_VALUE_LAMBDA @@ -177,11 +178,13 @@ class Python27Parser(Python2Parser): while1stmt ::= SETUP_LOOP returns pb_come_from while1stmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK POP_BLOCK COME_FROM - whilestmt ::= SETUP_LOOP testexpr l_stmts_opt JUMP_BACK POP_BLOCK _come_froms + whilestmt ::= SETUP_LOOP testexpr l_stmts_opt JUMP_BACK POP_BLOCK + _come_froms # Should this be JUMP_BACK+ ? # JUMP_BACK should all be to the same location - whilestmt ::= SETUP_LOOP testexpr l_stmts_opt JUMP_BACK JUMP_BACK POP_BLOCK _come_froms + whilestmt ::= SETUP_LOOP testexpr l_stmts_opt JUMP_BACK + JUMP_BACK POP_BLOCK _come_froms while1elsestmt ::= SETUP_LOOP l_stmts JUMP_BACK POP_BLOCK else_suitel COME_FROM diff --git a/uncompyle6/parsers/parse3.py b/uncompyle6/parsers/parse3.py index e71d3100..ab343826 100644 --- a/uncompyle6/parsers/parse3.py +++ b/uncompyle6/parsers/parse3.py @@ -346,14 +346,15 @@ class Python3Parser(PythonParser): # FIXME: Common with 2.7 ret_and ::= expr JUMP_IF_FALSE_OR_POP return_expr_or_cond COME_FROM ret_or ::= expr JUMP_IF_TRUE_OR_POP return_expr_or_cond COME_FROM - if_exp_ret ::= expr POP_JUMP_IF_FALSE expr RETURN_END_IF COME_FROM return_expr_or_cond + if_exp_ret ::= expr POP_JUMP_IF_FALSE expr RETURN_END_IF COME_FROM + return_expr_or_cond - # compare_chained1 is used exclusively in chained_compare - compare_chained1 ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP - compare_chained1 COME_FROM - compare_chained1 ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP - compare_chained2 COME_FROM + # compared_chained_middle is used exclusively in chained_compare + compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP + compared_chained_middle COME_FROM + compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP + compare_chained2 COME_FROM """ def p_stmt3(self, args): @@ -419,24 +420,24 @@ class Python3Parser(PythonParser): for ::= SETUP_LOOP expr for_iter store for_block POP_BLOCK COME_FROM_LOOP - forelsestmt ::= SETUP_LOOP expr for_iter store for_block POP_BLOCK else_suite - COME_FROM_LOOP + forelsestmt ::= SETUP_LOOP expr for_iter store for_block POP_BLOCK + else_suite COME_FROM_LOOP - forelselaststmt ::= SETUP_LOOP expr for_iter store for_block POP_BLOCK else_suitec - COME_FROM_LOOP + forelselaststmt ::= SETUP_LOOP expr for_iter store for_block POP_BLOCK + else_suitec COME_FROM_LOOP - forelselaststmtl ::= SETUP_LOOP expr for_iter store for_block POP_BLOCK else_suitel - COME_FROM_LOOP + forelselaststmtl ::= SETUP_LOOP expr for_iter store 
for_block POP_BLOCK + else_suitel COME_FROM_LOOP - whilestmt ::= SETUP_LOOP testexpr l_stmts_opt COME_FROM JUMP_BACK POP_BLOCK - COME_FROM_LOOP + whilestmt ::= SETUP_LOOP testexpr l_stmts_opt COME_FROM JUMP_BACK + POP_BLOCK COME_FROM_LOOP - whilestmt ::= SETUP_LOOP testexpr l_stmts_opt JUMP_BACK POP_BLOCK JUMP_BACK - COME_FROM_LOOP + whilestmt ::= SETUP_LOOP testexpr l_stmts_opt JUMP_BACK POP_BLOCK + JUMP_BACK COME_FROM_LOOP whilestmt ::= SETUP_LOOP testexpr l_stmts_opt JUMP_BACK POP_BLOCK COME_FROM_LOOP - whilestmt ::= SETUP_LOOP testexpr returns POP_BLOCK + whilestmt ::= SETUP_LOOP testexpr returns POP_BLOCK COME_FROM_LOOP while1elsestmt ::= SETUP_LOOP l_stmts JUMP_BACK diff --git a/uncompyle6/parsers/parse30.py b/uncompyle6/parsers/parse30.py index 9537d1ba..df5eaf99 100644 --- a/uncompyle6/parsers/parse30.py +++ b/uncompyle6/parsers/parse30.py @@ -14,9 +14,10 @@ class Python30Parser(Python31Parser): pt_bp ::= POP_TOP POP_BLOCK - assert ::= assert_expr jmp_true LOAD_ASSERT RAISE_VARARGS_1 COME_FROM POP_TOP - assert2 ::= assert_expr jmp_true LOAD_ASSERT expr CALL_FUNCTION_1 RAISE_VARARGS_1 - come_froms + assert ::= assert_expr jmp_true LOAD_ASSERT RAISE_VARARGS_1 + COME_FROM POP_TOP + assert2 ::= assert_expr jmp_true LOAD_ASSERT expr CALL_FUNCTION_1 + RAISE_VARARGS_1 come_froms call_stmt ::= expr _come_froms POP_TOP return_if_lambda ::= RETURN_END_IF_LAMBDA COME_FROM POP_TOP @@ -205,10 +206,10 @@ class Python30Parser(Python31Parser): come_froms POP_TOP POP_BLOCK COME_FROM_LOOP - # compare_chained is like x <= y <= z - compare_chained1 ::= expr DUP_TOP ROT_THREE COMPARE_OP - jmp_false compare_chained1 _come_froms - compare_chained1 ::= expr DUP_TOP ROT_THREE COMPARE_OP + # compare_chained is x <= y <= z + compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP + jmp_false compared_chained_middle _come_froms + compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP jmp_false compare_chained2 _come_froms compare_chained2 ::= expr COMPARE_OP RETURN_END_IF """ @@ -270,10 +271,11 @@ class Python30Parser(Python31Parser): jmp_true ::= JUMP_IF_TRUE_OR_POP POP_TOP jmp_true ::= POP_JUMP_IF_TRUE - compare_chained1 ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP - compare_chained1 COME_FROM - compare_chained1 ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP - compare_chained2 COME_FROM + compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP + JUMP_IF_FALSE_OR_POP compared_chained_middle + COME_FROM + compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP + JUMP_IF_FALSE_OR_POP compare_chained2 COME_FROM ret_or ::= expr JUMP_IF_TRUE_OR_POP return_expr_or_cond COME_FROM ret_and ::= expr JUMP_IF_FALSE_OR_POP return_expr_or_cond COME_FROM if_exp_ret ::= expr POP_JUMP_IF_FALSE expr RETURN_END_IF diff --git a/uncompyle6/parsers/parse37.py b/uncompyle6/parsers/parse37.py index 265a131d..d73be25d 100644 --- a/uncompyle6/parsers/parse37.py +++ b/uncompyle6/parsers/parse37.py @@ -1,4 +1,4 @@ -# Copyright (c) 2017-2020, 2022 Rocky Bernstein +# Copyright (c) 2017-2020, 2022-2023 Rocky Bernstein # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -130,7 +130,8 @@ class Python37Parser(Python37BaseParser): stmt ::= return return ::= return_expr RETURN_VALUE - # "returns" nonterminal is a sequence of statements that ends in a RETURN statement. + # "returns" nonterminal is a sequence of statements that ends in a + # RETURN statement. 
# In later Python versions with jump optimization, this can cause JUMPs # that would normally appear to be omitted. @@ -221,7 +222,7 @@ class Python37Parser(Python37BaseParser): compare_single ::= expr expr COMPARE_OP # A compare_chained is two comparisions like x <= y <= z - compare_chained ::= expr compare_chained1 ROT_TWO POP_TOP _come_froms + compare_chained ::= expr compared_chained_middle ROT_TWO POP_TOP _come_froms compare_chained2 ::= expr COMPARE_OP JUMP_FORWARD # Non-null kvlist items are broken out in the indiviual grammars @@ -625,25 +626,25 @@ class Python37Parser(Python37BaseParser): compare_chained ::= compare_chained37 compare_chained ::= compare_chained37_false - compare_chained37 ::= expr compare_chained1a_37 - compare_chained37 ::= expr compare_chained1c_37 + compare_chained37 ::= expr compared_chained_middlea_37 + compare_chained37 ::= expr compared_chained_middlec_37 - compare_chained37_false ::= expr compare_chained1_false_37 - compare_chained37_false ::= expr compare_chained1b_false_37 + compare_chained37_false ::= expr compared_chained_middle_false_37 + compare_chained37_false ::= expr compared_chained_middleb_false_37 compare_chained37_false ::= expr compare_chained2_false_37 - compare_chained1a_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE - compare_chained1a_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE + compared_chained_middlea_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE + compared_chained_middlea_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE compare_chained2a_37 COME_FROM POP_TOP COME_FROM - compare_chained1b_false_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE + compared_chained_middleb_false_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE compare_chained2b_false_37 POP_TOP _jump COME_FROM - compare_chained1c_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE + compared_chained_middlec_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE compare_chained2a_37 POP_TOP - compare_chained1_false_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE + compared_chained_middle_false_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE compare_chained2c_37 POP_TOP JUMP_FORWARD COME_FROM - compare_chained1_false_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE + compared_chained_middle_false_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE compare_chained2b_false_37 POP_TOP _jump COME_FROM compare_chained2_false_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE @@ -667,11 +668,13 @@ class Python37Parser(Python37BaseParser): expr ::= if_exp37 if_exp37 ::= expr expr jf_cfs expr COME_FROM jf_cfs ::= JUMP_FORWARD _come_froms - ifelsestmt ::= testexpr c_stmts_opt jf_cfs else_suite opt_come_from_except + ifelsestmt ::= testexpr c_stmts_opt jf_cfs else_suite + opt_come_from_except # This is probably more realistically an "ifstmt" (with a null else) # see _cmp() of python3.8/distutils/__pycache__/version.cpython-38.opt-1.pyc - ifelsestmt ::= testexpr stmts jf_cfs else_suite_opt opt_come_from_except + ifelsestmt ::= testexpr stmts jf_cfs else_suite_opt + opt_come_from_except expr_pjit ::= expr POP_JUMP_IF_TRUE @@ -694,7 +697,8 @@ class Python37Parser(Python37BaseParser): expr ::= if_exp_37a expr ::= if_exp_37b if_exp_37a ::= and_not expr JUMP_FORWARD come_froms expr COME_FROM - if_exp_37b ::= expr jmp_false expr POP_JUMP_IF_FALSE jump_forward_else expr + if_exp_37b ::= expr jmp_false expr POP_JUMP_IF_FALSE + jump_forward_else expr jmp_false_cf ::= 
POP_JUMP_IF_FALSE COME_FROM comp_if ::= or jmp_false_cf comp_iter """ def p_37_conditionals(self, args): @@ -1014,11 +1018,11 @@ class Python37Parser(Python37BaseParser): and ::= expr jmp_false expr COME_FROM or ::= expr_jt expr COME_FROM - # compare_chained1 is used exclusively in chained_compare - compare_chained1 ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP - compare_chained1 COME_FROM - compare_chained1 ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP - compare_chained2 COME_FROM + # compared_chained_middle is used exclusively in chained_compare + compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP + compared_chained_middle COME_FROM + compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP + compare_chained2 COME_FROM """ def p_stmt3(self, args): diff --git a/uncompyle6/semantics/consts.py b/uncompyle6/semantics/consts.py index 06c9d91c..8981d521 100644 --- a/uncompyle6/semantics/consts.py +++ b/uncompyle6/semantics/consts.py @@ -338,7 +338,7 @@ TABLE_DIRECT = { "if_exp_not_lambda": ("%p if not %c else %c", (2, "expr", 27), 0, 4), "compare_single": ('%p %[-1]{pattr.replace("-", " ")} %p', (0, 19), (1, 19)), "compare_chained": ("%p %p", (0, 29), (1, 30)), - "compare_chained1": ('%[3]{pattr.replace("-", " ")} %p %p', (0, 19), (-2, 19)), + "compared_chained_middle": ('%[3]{pattr.replace("-", " ")} %p %p', (0, 19), (-2, 19)), "compare_chained2": ('%[1]{pattr.replace("-", " ")} %p', (0, 19)), # "classdef": (), # handled by n_classdef() # A custom rule in n_function def distinguishes whether to call this or diff --git a/uncompyle6/semantics/customize37.py b/uncompyle6/semantics/customize37.py index 97109997..67613e0f 100644 --- a/uncompyle6/semantics/customize37.py +++ b/uncompyle6/semantics/customize37.py @@ -99,12 +99,12 @@ def customize_for_version37(self, version): "await_stmt": ("%|%c\n", 0), "c_async_with_stmt": ("%|async with %c:\n%+%c%-", (0, "expr"), 3), "call_ex": ("%c(%p)", (0, "expr"), (1, 100)), - "compare_chained1a_37": ( + "compared_chained_middlea_37": ( ' %[3]{pattr.replace("-", " ")} %p %p', (0, PRECEDENCE["compare"] - 1), (-4, PRECEDENCE["compare"] - 1), ), - "compare_chained1_false_37": ( + "compared_chained_middle_false_37": ( ' %[3]{pattr.replace("-", " ")} %p %p', (0, PRECEDENCE["compare"] - 1), (-4, PRECEDENCE["compare"] - 1), @@ -114,12 +114,12 @@ def customize_for_version37(self, version): (0, PRECEDENCE["compare"] - 1), (-5, PRECEDENCE["compare"] - 1), ), - "compare_chained1b_false_37": ( + "compared_chained_middleb_false_37": ( ' %[3]{pattr.replace("-", " ")} %p %p', (0, PRECEDENCE["compare"] - 1), (-4, PRECEDENCE["compare"] - 1), ), - "compare_chained1c_37": ( + "compared_chained_middlec_37": ( ' %[3]{pattr.replace("-", " ")} %p %p', (0, PRECEDENCE["compare"] - 1), (-2, PRECEDENCE["compare"] - 1), @@ -461,12 +461,12 @@ def customize_for_version37(self, version): compare_chained37 = node[0] if ( compare_chained37 == "compare_chained37" - and compare_chained37[1] == "compare_chained1b_37" + and compare_chained37[1] == "compared_chained_middleb_37" ): - compare_chained1b_37 = compare_chained37[1] + compared_chained_middleb_37 = compare_chained37[1] if ( - len(compare_chained1b_37) > 2 - and compare_chained1b_37[-2] == "JUMP_FORWARD" + len(compared_chained_middleb_37) > 2 + and compared_chained_middleb_37[-2] == "JUMP_FORWARD" ): node.kind = "testfalse" pass From ddeb5af6d6ee7fb72a7c5d1c846f321502acfeda Mon Sep 17 00:00:00 2001 From: rocky Date: Sat, 29 Jul 2023 12:09:25 -0400 Subject: [PATCH 12/12] compare_chained2 -> 
compare_chained_right --- test/simple_source/bug33/01_triple_compare.py | 4 +- uncompyle6/parser.py | 7 +-- uncompyle6/parsers/parse26.py | 12 ++--- uncompyle6/parsers/parse27.py | 6 +-- uncompyle6/parsers/parse3.py | 2 +- uncompyle6/parsers/parse30.py | 14 +++--- uncompyle6/parsers/parse32.py | 7 ++- uncompyle6/parsers/parse36.py | 2 +- uncompyle6/parsers/parse37.py | 46 +++++++++---------- uncompyle6/semantics/consts.py | 2 +- uncompyle6/semantics/customize37.py | 10 ++-- 11 files changed, 56 insertions(+), 56 deletions(-) diff --git a/test/simple_source/bug33/01_triple_compare.py b/test/simple_source/bug33/01_triple_compare.py index dc28d36c..cdbc18ab 100644 --- a/test/simple_source/bug33/01_triple_compare.py +++ b/test/simple_source/bug33/01_triple_compare.py @@ -1,7 +1,7 @@ # In Python 3.3+ this uses grammar rule -# compare_chained2 ::= expr COMPARE_OP RETURN_VALUE +# compare_chained_right ::= expr COMPARE_OP RETURN_VALUE # In Python 3.6 uses this uses grammar rule -# compare_chained2 ::= expr COMPARE_OP come_froms JUMP_FORWARD +# compare_chained_right ::= expr COMPARE_OP come_froms JUMP_FORWARD # Seen in Python 3.3 ipaddress.py diff --git a/uncompyle6/parser.py b/uncompyle6/parser.py index dbeaf73f..8b91f040 100644 --- a/uncompyle6/parser.py +++ b/uncompyle6/parser.py @@ -597,9 +597,10 @@ class PythonParser(GenericASTBuilder): compare ::= compare_single compare_single ::= expr expr COMPARE_OP - # A compare_chained is two comparisions like x <= y <= z - compare_chained ::= expr compared_chained_middle ROT_TWO POP_TOP _come_froms - compare_chained2 ::= expr COMPARE_OP JUMP_FORWARD + # A compare_chained is two comparisons, as in: x <= y <= z + compare_chained ::= expr compared_chained_middle ROT_TWO POP_TOP + _come_froms + compare_chained_right ::= expr COMPARE_OP JUMP_FORWARD # Non-null kvlist items are broken out in the indiviual grammars kvlist ::= diff --git a/uncompyle6/parsers/parse26.py b/uncompyle6/parsers/parse26.py index 1c90b194..74370369 100644 --- a/uncompyle6/parsers/parse26.py +++ b/uncompyle6/parsers/parse26.py @@ -307,22 +307,22 @@ class Python26Parser(Python2Parser): and ::= expr JUMP_IF_FALSE POP_TOP expr JUMP_IF_FALSE POP_TOP - # compare_chained is x <= y <= z + # A "compare_chained" is two comparisons like x <= y <= z compare_chained ::= expr compared_chained_middle ROT_TWO COME_FROM POP_TOP _come_froms compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP jmp_false compared_chained_middle _come_froms compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP - jmp_false compare_chained2 _come_froms + jmp_false compare_chained_right _come_froms compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP jmp_false_then compared_chained_middle _come_froms compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP - jmp_false_then compare_chained2 _come_froms + jmp_false_then compare_chained_right _come_froms - compare_chained2 ::= expr COMPARE_OP return_expr_lambda - compare_chained2 ::= expr COMPARE_OP RETURN_END_IF_LAMBDA - compare_chained2 ::= expr COMPARE_OP RETURN_END_IF COME_FROM + compare_chained_right ::= expr COMPARE_OP return_expr_lambda + compare_chained_right ::= expr COMPARE_OP RETURN_END_IF_LAMBDA + compare_chained_right ::= expr COMPARE_OP RETURN_END_IF COME_FROM return_if_lambda ::= RETURN_END_IF_LAMBDA POP_TOP stmt ::= if_exp_lambda diff --git a/uncompyle6/parsers/parse27.py b/uncompyle6/parsers/parse27.py index 50e1503a..92f0d4b6 100644 --- a/uncompyle6/parsers/parse27.py +++ b/uncompyle6/parsers/parse27.py @@ -120,13 +120,13 @@
class Python27Parser(Python2Parser): JUMP_IF_FALSE_OR_POP compared_chained_middle COME_FROM compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP - JUMP_IF_FALSE_OR_POP compare_chained2 COME_FROM + JUMP_IF_FALSE_OR_POP compare_chained_right COME_FROM return_lambda ::= RETURN_VALUE return_lambda ::= RETURN_VALUE_LAMBDA - compare_chained2 ::= expr COMPARE_OP return_lambda - compare_chained2 ::= expr COMPARE_OP return_lambda + compare_chained_right ::= expr COMPARE_OP return_lambda + compare_chained_right ::= expr COMPARE_OP return_lambda # if_exp_true are for conditions which always evaluate true # There is dead or non-optional remnants of the condition code though, diff --git a/uncompyle6/parsers/parse3.py b/uncompyle6/parsers/parse3.py index ab343826..e43f07b1 100644 --- a/uncompyle6/parsers/parse3.py +++ b/uncompyle6/parsers/parse3.py @@ -354,7 +354,7 @@ class Python3Parser(PythonParser): compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP compared_chained_middle COME_FROM compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP - compare_chained2 COME_FROM + compare_chained_right COME_FROM """ def p_stmt3(self, args): diff --git a/uncompyle6/parsers/parse30.py b/uncompyle6/parsers/parse30.py index df5eaf99..f9106665 100644 --- a/uncompyle6/parsers/parse30.py +++ b/uncompyle6/parsers/parse30.py @@ -20,8 +20,8 @@ class Python30Parser(Python31Parser): RAISE_VARARGS_1 come_froms call_stmt ::= expr _come_froms POP_TOP - return_if_lambda ::= RETURN_END_IF_LAMBDA COME_FROM POP_TOP - compare_chained2 ::= expr COMPARE_OP RETURN_END_IF_LAMBDA + return_if_lambda ::= RETURN_END_IF_LAMBDA COME_FROM POP_TOP + compare_chained_right ::= expr COMPARE_OP RETURN_END_IF_LAMBDA # FIXME: combine with parse3.2 whileTruestmt ::= SETUP_LOOP l_stmts_opt @@ -206,12 +206,12 @@ class Python30Parser(Python31Parser): come_froms POP_TOP POP_BLOCK COME_FROM_LOOP - # compare_chained is x <= y <= z + # A "compare_chained" is two comparisons like x <= y <= z compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP - jmp_false compared_chained_middle _come_froms + jmp_false compared_chained_middle _come_froms compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP - jmp_false compare_chained2 _come_froms - compare_chained2 ::= expr COMPARE_OP RETURN_END_IF + jmp_false compare_chained_right _come_froms + compare_chained_right ::= expr COMPARE_OP RETURN_END_IF """ @@ -275,7 +275,7 @@ class Python30Parser(Python31Parser): JUMP_IF_FALSE_OR_POP compared_chained_middle COME_FROM compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP - JUMP_IF_FALSE_OR_POP compare_chained2 COME_FROM + JUMP_IF_FALSE_OR_POP compare_chained_right COME_FROM ret_or ::= expr JUMP_IF_TRUE_OR_POP return_expr_or_cond COME_FROM ret_and ::= expr JUMP_IF_FALSE_OR_POP return_expr_or_cond COME_FROM if_exp_ret ::= expr POP_JUMP_IF_FALSE expr RETURN_END_IF diff --git a/uncompyle6/parsers/parse32.py b/uncompyle6/parsers/parse32.py index f0706798..923aca73 100644 --- a/uncompyle6/parsers/parse32.py +++ b/uncompyle6/parsers/parse32.py @@ -25,10 +25,9 @@ class Python32Parser(Python3Parser): """ if_exp ::= expr jmp_false expr jump_forward_else expr COME_FROM - # compare_chained2 is used in a "chained_compare": x <= y <= z - # used exclusively in compare_chained - compare_chained2 ::= expr COMPARE_OP RETURN_VALUE - compare_chained2 ::= expr COMPARE_OP RETURN_VALUE_LAMBDA + # compare_chained_right is used in a "chained_compare": x <= y <= z + compare_chained_right ::= expr COMPARE_OP 
RETURN_VALUE + compare_chained_right ::= expr COMPARE_OP RETURN_VALUE_LAMBDA # Python < 3.5 no POP BLOCK whileTruestmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK COME_FROM_LOOP diff --git a/uncompyle6/parsers/parse36.py b/uncompyle6/parsers/parse36.py index fd79035f..280b23ba 100644 --- a/uncompyle6/parsers/parse36.py +++ b/uncompyle6/parsers/parse36.py @@ -190,7 +190,7 @@ class Python36Parser(Python35Parser): tryfinally_return_stmt ::= SETUP_FINALLY suite_stmts_opt POP_BLOCK LOAD_CONST COME_FROM_FINALLY - compare_chained2 ::= expr COMPARE_OP come_froms JUMP_FORWARD + compare_chained_right ::= expr COMPARE_OP come_froms JUMP_FORWARD """ # Some of this is duplicated from parse37. Eventually we'll probably rebase from diff --git a/uncompyle6/parsers/parse37.py b/uncompyle6/parsers/parse37.py index d73be25d..fad4d1ce 100644 --- a/uncompyle6/parsers/parse37.py +++ b/uncompyle6/parsers/parse37.py @@ -223,7 +223,7 @@ class Python37Parser(Python37BaseParser): # A compare_chained is two comparisions like x <= y <= z compare_chained ::= expr compared_chained_middle ROT_TWO POP_TOP _come_froms - compare_chained2 ::= expr COMPARE_OP JUMP_FORWARD + compare_chained_right ::= expr COMPARE_OP JUMP_FORWARD # Non-null kvlist items are broken out in the indiviual grammars kvlist ::= @@ -440,10 +440,10 @@ class Python37Parser(Python37BaseParser): """ if_exp::= expr jmp_false expr jump_forward_else expr COME_FROM - # compare_chained2 is used in a "chained_compare": x <= y <= z + # compare_chained_right is used in a "chained_compare": x <= y <= z # used exclusively in compare_chained - compare_chained2 ::= expr COMPARE_OP RETURN_VALUE - compare_chained2 ::= expr COMPARE_OP RETURN_VALUE_LAMBDA + compare_chained_right ::= expr COMPARE_OP RETURN_VALUE + compare_chained_right ::= expr COMPARE_OP RETURN_VALUE_LAMBDA # Python < 3.5 no POP BLOCK whileTruestmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK COME_FROM_LOOP @@ -631,36 +631,36 @@ class Python37Parser(Python37BaseParser): compare_chained37_false ::= expr compared_chained_middle_false_37 compare_chained37_false ::= expr compared_chained_middleb_false_37 - compare_chained37_false ::= expr compare_chained2_false_37 + compare_chained37_false ::= expr compare_chained_right_false_37 compared_chained_middlea_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE compared_chained_middlea_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE - compare_chained2a_37 COME_FROM POP_TOP COME_FROM + compare_chained_righta_37 COME_FROM POP_TOP COME_FROM compared_chained_middleb_false_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE - compare_chained2b_false_37 POP_TOP _jump COME_FROM + compare_chained_rightb_false_37 POP_TOP _jump COME_FROM compared_chained_middlec_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE - compare_chained2a_37 POP_TOP + compare_chained_righta_37 POP_TOP compared_chained_middle_false_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE - compare_chained2c_37 POP_TOP JUMP_FORWARD COME_FROM + compare_chained_rightc_37 POP_TOP JUMP_FORWARD COME_FROM compared_chained_middle_false_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE - compare_chained2b_false_37 POP_TOP _jump COME_FROM + compare_chained_rightb_false_37 POP_TOP _jump COME_FROM - compare_chained2_false_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE - compare_chained2a_false_37 POP_TOP JUMP_BACK COME_FROM + compare_chained_right_false_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE + compare_chained_righta_false_37 POP_TOP JUMP_BACK COME_FROM 
- compare_chained2a_37 ::= expr COMPARE_OP come_from_opt POP_JUMP_IF_TRUE JUMP_FORWARD - compare_chained2a_37 ::= expr COMPARE_OP come_from_opt POP_JUMP_IF_TRUE JUMP_BACK - compare_chained2a_false_37 ::= expr COMPARE_OP come_from_opt POP_JUMP_IF_FALSE jf_cfs + compare_chained_righta_37 ::= expr COMPARE_OP come_from_opt POP_JUMP_IF_TRUE JUMP_FORWARD + compare_chained_righta_37 ::= expr COMPARE_OP come_from_opt POP_JUMP_IF_TRUE JUMP_BACK + compare_chained_righta_false_37 ::= expr COMPARE_OP come_from_opt POP_JUMP_IF_FALSE jf_cfs - compare_chained2b_false_37 ::= expr COMPARE_OP come_from_opt POP_JUMP_IF_FALSE JUMP_FORWARD COME_FROM - compare_chained2b_false_37 ::= expr COMPARE_OP come_from_opt POP_JUMP_IF_FALSE JUMP_FORWARD + compare_chained_rightb_false_37 ::= expr COMPARE_OP come_from_opt POP_JUMP_IF_FALSE JUMP_FORWARD COME_FROM + compare_chained_rightb_false_37 ::= expr COMPARE_OP come_from_opt POP_JUMP_IF_FALSE JUMP_FORWARD - compare_chained2c_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP come_from_opt POP_JUMP_IF_FALSE - compare_chained2a_false_37 ELSE - compare_chained2c_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP come_from_opt POP_JUMP_IF_FALSE - compare_chained2a_false_37 + compare_chained_rightc_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP come_from_opt POP_JUMP_IF_FALSE + compare_chained_righta_false_37 ELSE + compare_chained_rightc_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP come_from_opt POP_JUMP_IF_FALSE + compare_chained_righta_false_37 """ def p_37_conditionals(self, args): @@ -1022,7 +1022,7 @@ class Python37Parser(Python37BaseParser): compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP compared_chained_middle COME_FROM compared_chained_middle ::= expr DUP_TOP ROT_THREE COMPARE_OP JUMP_IF_FALSE_OR_POP - compare_chained2 COME_FROM + compare_chained_right COME_FROM """ def p_stmt3(self, args): @@ -1209,7 +1209,7 @@ class Python37Parser(Python37BaseParser): tryfinally_return_stmt ::= SETUP_FINALLY suite_stmts_opt POP_BLOCK LOAD_CONST COME_FROM_FINALLY - compare_chained2 ::= expr COMPARE_OP come_froms JUMP_FORWARD + compare_chained_right ::= expr COMPARE_OP come_froms JUMP_FORWARD """ def p_37_misc(self, args): diff --git a/uncompyle6/semantics/consts.py b/uncompyle6/semantics/consts.py index 8981d521..18d5b9e2 100644 --- a/uncompyle6/semantics/consts.py +++ b/uncompyle6/semantics/consts.py @@ -339,7 +339,7 @@ TABLE_DIRECT = { "compare_single": ('%p %[-1]{pattr.replace("-", " ")} %p', (0, 19), (1, 19)), "compare_chained": ("%p %p", (0, 29), (1, 30)), "compared_chained_middle": ('%[3]{pattr.replace("-", " ")} %p %p', (0, 19), (-2, 19)), - "compare_chained2": ('%[1]{pattr.replace("-", " ")} %p', (0, 19)), + "compare_chained_right": ('%[1]{pattr.replace("-", " ")} %p', (0, 19)), # "classdef": (), # handled by n_classdef() # A custom rule in n_function def distinguishes whether to call this or # function_def_async diff --git a/uncompyle6/semantics/customize37.py b/uncompyle6/semantics/customize37.py index 67613e0f..c0b2f2f7 100644 --- a/uncompyle6/semantics/customize37.py +++ b/uncompyle6/semantics/customize37.py @@ -109,7 +109,7 @@ def customize_for_version37(self, version): (0, PRECEDENCE["compare"] - 1), (-4, PRECEDENCE["compare"] - 1), ), - "compare_chained2_false_37": ( + "compare_chained_right_false_37": ( ' %[3]{pattr.replace("-", " ")} %p %p', (0, PRECEDENCE["compare"] - 1), (-5, PRECEDENCE["compare"] - 1), @@ -124,19 +124,19 @@ def customize_for_version37(self, version): (0, PRECEDENCE["compare"] - 1), (-2, PRECEDENCE["compare"] - 1), ), - 
"compare_chained2a_37": ( + "compare_chained_righta_37": ( '%[1]{pattr.replace("-", " ")} %p', (0, PRECEDENCE["compare"] - 1), ), - "compare_chained2b_false_37": ( + "compare_chained_rightb_false_37": ( '%[1]{pattr.replace("-", " ")} %p', (0, PRECEDENCE["compare"] - 1), ), - "compare_chained2a_false_37": ( + "compare_chained_righta_false_37": ( '%[1]{pattr.replace("-", " ")} %p', (0, PRECEDENCE["compare"] - 1), ), - "compare_chained2c_37": ( + "compare_chained_rightc_37": ( '%[3]{pattr.replace("-", " ")} %p %p', (0, PRECEDENCE["compare"] - 1), (6, PRECEDENCE["compare"] - 1),