diff --git a/uncompyle6/parsers/parse27.py b/uncompyle6/parsers/parse27.py
index 82667da9..5837e9cb 100644
--- a/uncompyle6/parsers/parse27.py
+++ b/uncompyle6/parsers/parse27.py
@@ -6,6 +6,7 @@ from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
 from xdis import next_offset
 from uncompyle6.parser import PythonParserSingle, nop_func
 from uncompyle6.parsers.parse2 import Python2Parser
+from uncompyle6.parsers.reducecheck import ifelsestmt
 
 
 class Python27Parser(Python2Parser):
@@ -216,6 +217,12 @@ class Python27Parser(Python2Parser):
 
         super(Python27Parser, self).customize_grammar_rules(tokens, customize)
 
+
+        # FIXME: Put more in this table
+        self.reduce_check_table = {
+            "ifelsestmt": ifelsestmt,
+        }
+
         self.check_reduce["and"] = "AST"
         self.check_reduce["conditional"] = "AST"
@@ -225,6 +232,7 @@ class Python27Parser(Python2Parser):
         # self.check_reduce["or"] = "AST"
         self.check_reduce["raise_stmt1"] = "AST"
         self.check_reduce["iflaststmtl"] = "AST"
+        self.check_reduce["ifelsestmt"] = "AST"
         self.check_reduce["list_if_not"] = "AST"
         self.check_reduce["list_if"] = "AST"
         self.check_reduce["comp_if"] = "AST"
@@ -238,6 +246,12 @@ class Python27Parser(Python2Parser):
                         self).reduce_is_invalid(rule, ast,
                                                 tokens, first, last)
 
+        lhs = rule[0]
+        n = len(tokens)
+        fn = self.reduce_check_table.get(lhs, None)
+        if fn:
+            return fn(self, lhs, n, rule, ast, tokens, first, last)
+
         last = min(last, n-1)
         if invalid:
             return invalid
diff --git a/uncompyle6/parsers/reducecheck/ifelsestmt.py b/uncompyle6/parsers/reducecheck/ifelsestmt.py
index 55fcc7d9..81d059b4 100644
--- a/uncompyle6/parsers/reducecheck/ifelsestmt.py
+++ b/uncompyle6/parsers/reducecheck/ifelsestmt.py
@@ -39,6 +39,16 @@ def ifelsestmt(self, lhs, n, rule, ast, tokens, first, last):
                "\\e_opt_come_from_except",
            ),
        ),
+        (
+            "ifelsestmt",
+            (
+                "testexpr",
+                "c_stmts_opt",
+                "JUMP_FORWARD",
+                "else_suite",
+                "come_froms",
+            ),
+        ),
        (
            "ifelsestmt",
            ("testexpr", "c_stmts", "come_froms", "else_suite", "come_froms",),
@@ -79,14 +89,13 @@ def ifelsestmt(self, lhs, n, rule, ast, tokens, first, last):
         elif tokens[first].offset > come_froms.attr:
             return True
 
-    # For mysterious reasons a COME_FROM in tokens[last+1] might be part of the grammar rule
-    # even though it is not found in come_froms.
-    # Work around this.
-    if (
-        last < n
-        and tokens[last] == "COME_FROM"
-        and tokens[first].offset > tokens[last].attr
-    ):
+    # FIXME: There is weirdness in the grammar we need to work around.
+    # we need to clean up the grammar.
+    if self.version < 3.0:
+        last_token = ast[-1]
+    else:
+        last_token = tokens[last]
+    if last_token == "COME_FROM" and tokens[first].offset > last_token.attr:
         return True
 
     testexpr = ast[0]
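
For context, the parse27.py hunks wire the Python 2.7 parser into a per-nonterminal reduction-check dispatch: reduce_is_invalid() looks the rule's left-hand side up in reduce_check_table and, if a checker such as ifelsestmt is registered, lets that checker decide whether the candidate reduction should be rejected. The ifelsestmt.py hunks then teach the checker one more valid rule shape (testexpr c_stmts_opt JUMP_FORWARD else_suite come_froms), which appears to be the pattern a plain if/else produces when the true branch ends in a JUMP_FORWARD over the else suite. The sketch below only illustrates the dispatch shape; ToyParser and reject_long_rules are made-up names, not uncompyle6's actual classes, though the (self, lhs, n, rule, ast, tokens, first, last) signature matches the one used in the diff.

# A minimal, hypothetical sketch of the reduce_check_table dispatch idea.
# ToyParser and reject_long_rules are illustrative names only; the real
# checkers (e.g. ifelsestmt) live in uncompyle6.parsers.reducecheck.

def reject_long_rules(parser, lhs, n, rule, ast, tokens, first, last):
    # A checker returns True to veto (invalidate) the candidate reduction.
    # Here we arbitrarily reject reductions spanning more than 100 tokens.
    return (last - first) > 100

class ToyParser(object):
    def __init__(self):
        # Map a rule's left-hand-side nonterminal to its reduction checker,
        # mirroring the table added in parse27.py.
        self.reduce_check_table = {"ifelsestmt": reject_long_rules}

    def reduce_is_invalid(self, rule, ast, tokens, first, last):
        lhs = rule[0]
        fn = self.reduce_check_table.get(lhs, None)
        if fn:
            return fn(self, lhs, len(tokens), rule, ast, tokens, first, last)
        return False

# Registered nonterminals get their checker consulted; others pass through.
parser = ToyParser()
print(parser.reduce_is_invalid(("ifelsestmt", ()), None, [None] * 10, 0, 5))     # False
print(parser.reduce_is_invalid(("ifelsestmt", ()), None, [None] * 200, 0, 150))  # True
print(parser.reduce_is_invalid(("and", ()), None, [None] * 200, 0, 150))         # False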