python-uncompyle6 (mirror of https://github.com/rocky/python-uncompyle6.git)

Commit: Go over 2.x grammar testing
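This commit reworks the 2.x grammar test and tidies the parser modules it
exercises: the grammar unit test gains a Python 2.7-specific block of expected
nonterminals, the `ending_return` productions move out of the shared grammar in
uncompyle6/parser.py and into the version-specific 2.7 and 3.x parsers, imports
are reordered, and long statements are reflowed to Black-style formatting.
A few hunks (for example @@ -113 and @@ -519) show no visible change and appear
to be indentation-only edits. The file paths shown before each group of hunks
are inferred from the imports and class names in the hunks themselves.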
pytest/test_grammar.py
@@ -1,38 +1,53 @@
 import re
 import unittest
 
+from xdis.version_info import IS_PYPY, PYTHON_VERSION_TRIPLE
+
 from uncompyle6.parser import get_python_parser, python_parser
-from xdis.version_info import PYTHON_VERSION_TRIPLE, IS_PYPY
 
+
 class TestGrammar(unittest.TestCase):
     def test_grammar(self):
        def check_tokens(tokens, opcode_set):
            remain_tokens = set(tokens) - opcode_set
-           remain_tokens = set([re.sub('_\d+$','', t) for t in remain_tokens])
-           remain_tokens = set([re.sub('_CONT$','', t) for t in remain_tokens])
+           remain_tokens = set([re.sub("_\d+$", "", t) for t in remain_tokens])
+           remain_tokens = set([re.sub("_CONT$", "", t) for t in remain_tokens])
            remain_tokens = set(remain_tokens) - opcode_set
-           self.assertEqual(remain_tokens, set([]),
-                            "Remaining tokens %s\n====\n%s" % (remain_tokens, p.dump_grammar()))
+           self.assertEqual(
+               remain_tokens,
+               set([]),
+               "Remaining tokens %s\n====\n%s" % (remain_tokens, p.dump_grammar()),
+           )
 
        p = get_python_parser(PYTHON_VERSION_TRIPLE, is_pypy=IS_PYPY)
-       (lhs, rhs, tokens,
-        right_recursive, dup_rhs) = p.check_sets()
-       expect_lhs = set(['pos_arg', 'get_iter', 'attribute'])
-       unused_rhs = set(['list', 'call', 'mkfunc',
-                         'mklambda',
-                         'unpack',])
+       (lhs, rhs, tokens, right_recursive, dup_rhs) = p.check_sets()
+       expect_lhs = set(["pos_arg", "get_iter", "attribute"])
+       unused_rhs = set(["list", "call", "mkfunc", "unpack", "lambda_body"])
 
-       expect_right_recursive = frozenset([('designList',
-                                            ('store', 'DUP_TOP', 'designList'))])
-       expect_lhs.add('kwarg')
+       expect_right_recursive = frozenset(
+           [("designList", ("store", "DUP_TOP", "designList"))]
+       )
+       expect_lhs.add("kwarg")
+
+       if PYTHON_VERSION_TRIPLE[:2] == (2, 7):
+           expect_lhs.add("kv3")
+           expect_lhs.add("kvlist")
+           unused_rhs.add("dict")
 
        self.assertEqual(expect_lhs, set(lhs))
        self.assertEqual(unused_rhs, set(rhs))
        self.assertEqual(expect_right_recursive, right_recursive)
 
-       expect_dup_rhs = frozenset([('COME_FROM',), ('CONTINUE',), ('JUMP_ABSOLUTE',),
-                                   ('LOAD_CONST',),
-                                   ('JUMP_BACK',), ('JUMP_FORWARD',)])
+       expect_dup_rhs = frozenset(
+           [
+               ("COME_FROM",),
+               ("CONTINUE",),
+               ("JUMP_ABSOLUTE",),
+               ("LOAD_CONST",),
+               ("JUMP_BACK",),
+               ("JUMP_FORWARD",),
+           ]
+       )
 
        reduced_dup_rhs = {}
        for k in dup_rhs:
@@ -47,10 +62,21 @@ class TestGrammar(unittest.TestCase):
     # FIXME: Something got borked here
     def no_test_dup_rule(self):
         import inspect
-        python_parser(PYTHON_VERSION_TRIPLE, inspect.currentframe().f_code,
-                      is_pypy=IS_PYPY,
-                      parser_debug={
-                          'dups': True, 'transition': False, 'reduce': False,
-                          'rules': False, 'errorstack': None, 'context': True})
-if __name__ == '__main__':
+        python_parser(
+            PYTHON_VERSION_TRIPLE,
+            inspect.currentframe().f_code,
+            is_pypy=IS_PYPY,
+            parser_debug={
+                "dups": True,
+                "transition": False,
+                "reduce": False,
+                "rules": False,
+                "errorstack": None,
+                "context": True,
+            },
+        )
+
+
+if __name__ == "__main__":
     unittest.main()
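Note: the reformatted test keeps "_\d+$" as a plain string, where \d is an
invalid escape sequence that newer CPython releases warn about; a raw string
would be the safer spelling. A small self-contained sketch of what this
normalization does (the token names here are illustrative):

    import re

    tokens = {"JUMP_BACK_12", "CALL_FUNCTION_1", "LOAD_CONST"}
    # r"..." avoids the "\d" escape warning while matching the same text:
    # strip a trailing _<number> so customized token variants compare equal.
    normalized = set(re.sub(r"_\d+$", "", t) for t in tokens)
    assert normalized == {"JUMP_BACK", "CALL_FUNCTION", "LOAD_CONST"}
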
uncompyle6/parser.py
@@ -21,10 +21,11 @@ Common uncompyle6 parser routines.
 
 import sys
 
-from spark_parser import GenericASTBuilder, DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
-from uncompyle6.show import maybe_show_asm
+from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG, GenericASTBuilder
 from xdis import iscode
 
+from uncompyle6.show import maybe_show_asm
+
 
 class ParserError(Exception):
     def __init__(self, token, offset, debug=PARSER_DEFAULT_DEBUG):
@@ -91,7 +92,14 @@ class PythonParser(GenericASTBuilder):
         # singleton reduction that we can simplify. It also happens to be optional
         # in its other derivation
         self.optional_nt |= frozenset(
-            ("come_froms", "suite_stmts", "l_stmts_opt", "c_stmts_opt", "stmts_opt", "stmt")
+            (
+                "come_froms",
+                "suite_stmts",
+                "l_stmts_opt",
+                "c_stmts_opt",
+                "stmts_opt",
+                "stmt",
+            )
         )
 
         # Reduce singleton reductions in these nonterminals:
@@ -113,10 +121,10 @@ class PythonParser(GenericASTBuilder):
 
     def add_unique_rule(self, rule, opname, arg_count, customize):
         """Add rule to grammar, but only if it hasn't been added previously
         opname and stack_count are used in the customize() semantic
         the actions to add the semantic action rule. Stack_count is
         used in custom opcodes like MAKE_FUNCTION to indicate how
         many arguments it has. Often it is not used.
         """
         if rule not in self.new_rules:
             # print("XXX ", rule) # debug
@@ -223,7 +231,9 @@ class PythonParser(GenericASTBuilder):
         """
         # Low byte indicates number of positional parameters,
         # high byte number of keyword parameters
-        assert token.kind.startswith("CALL_FUNCTION") or token.kind.startswith("CALL_METHOD")
+        assert token.kind.startswith("CALL_FUNCTION") or token.kind.startswith(
+            "CALL_METHOD"
+        )
         args_pos = token.attr & 0xFF
         args_kw = (token.attr >> 8) & 0xFF
         return args_pos, args_kw
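Note: the assert reflowed above guards the operand packing described in the
comments: for these CALL_FUNCTION/CALL_METHOD tokens, the low byte of the
operand is the positional-argument count and the next byte the
keyword-argument count. A standalone sketch of the decoding (not the parser's
own code):

    def decode_call_arg(attr):
        args_pos = attr & 0xFF          # low byte: positional arguments
        args_kw = (attr >> 8) & 0xFF    # next byte: keyword arguments
        return args_pos, args_kw

    # An operand of 0x0203 means 3 positional and 2 keyword arguments.
    assert decode_call_arg(0x0203) == (3, 2)
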
@@ -304,9 +314,6 @@ class PythonParser(GenericASTBuilder):
         c_stmts ::= lastc_stmt
         c_stmts ::= continues
 
-        ending_return ::= RETURN_VALUE RETURN_LAST
-        ending_return ::= RETURN_VALUE_LAMBDA LAMBDA_MARKER
-
         lastc_stmt ::= iflaststmt
         lastc_stmt ::= forelselaststmt
         lastc_stmt ::= ifelsestmtc
@@ -314,9 +321,6 @@ class PythonParser(GenericASTBuilder):
         c_stmts_opt ::= c_stmts
         c_stmts_opt ::= pass
 
-        stmts_opt ::= _stmts
-        stmts_opt ::= pass
-
         # statements inside a loop
         l_stmts ::= _stmts
         l_stmts ::= returns
@@ -907,9 +911,12 @@ def python_parser(
 if __name__ == "__main__":
 
     def parse_test(co):
-        from xdis.version_info import PYTHON_VERSION_TRIPLE, IS_PYPY
+        from xdis.version_info import IS_PYPY, PYTHON_VERSION_TRIPLE
 
-        ast = python_parser(PYTHON_VERSION_TRIPLE[:2], co, showasm=True, is_pypy=IS_PYPY)
+        ast = python_parser(
+            PYTHON_VERSION_TRIPLE[:2], co, showasm=True, is_pypy=IS_PYPY
+        )
         print(ast)
         return
 
     parse_test(parse_test.func_code)
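Note: the `ending_return` productions deleted from the shared grammar above are
not lost; the two files that follow re-add them in the version-specific
parsers, next to the `dict_comp_func` and `set_comp_func` rules that reference
`ending_return`. That keeps each per-version grammar self-contained for checks
like test_grammar's unused-nonterminal scan.
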
uncompyle6/parsers/parse27.py
@@ -38,6 +38,9 @@ class Python27Parser(Python2Parser):
 
         stmt ::= dict_comp_func
 
+        ending_return ::= RETURN_VALUE RETURN_LAST
+        ending_return ::= RETURN_VALUE_LAMBDA LAMBDA_MARKER
+
         dict_comp_func ::= BUILD_MAP_0 LOAD_FAST FOR_ITER store
                            comp_iter JUMP_BACK ending_return
 
uncompyle6/parsers/parse3.py
@@ -27,22 +27,24 @@ that a later phase can turn into a sequence of ASCII text.
 """
 
 import re
-from uncompyle6.scanners.tok import Token
+
+from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
+
 from uncompyle6.parser import PythonParser, PythonParserSingle, nop_func
 from uncompyle6.parsers.reducecheck import (
     and_invalid,
     except_handler_else,
     ifelsestmt,
-    ifstmt,
     iflaststmt,
+    ifstmt,
     or_check,
     testtrue,
     tryelsestmtl3,
     tryexcept,
-    while1stmt
+    while1stmt,
 )
 from uncompyle6.parsers.treenode import SyntaxTree
-from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
+from uncompyle6.scanners.tok import Token
 
 
 class Python3Parser(PythonParser):
@@ -79,6 +81,9 @@ class Python3Parser(PythonParser):
 
         stmt ::= set_comp_func
 
+        ending_return ::= RETURN_VALUE RETURN_LAST
+        ending_return ::= RETURN_VALUE_LAMBDA LAMBDA_MARKER
+
         # TODO this can be simplified
         set_comp_func ::= BUILD_SET_0 LOAD_ARG FOR_ITER store comp_iter
                           JUMP_BACK ending_return
@@ -98,7 +103,7 @@ class Python3Parser(PythonParser):
     """
 
     def p_dict_comp3(self, args):
-        """"
+        """ "
         expr ::= dict_comp
         stmt ::= dict_comp_func
         dict_comp_func ::= BUILD_MAP_0 LOAD_ARG FOR_ITER store
@@ -519,7 +524,7 @@ class Python3Parser(PythonParser):
           expr
          call
          CALL_FUNCTION_3
        """
        # FIXME: I bet this can be simplified
        # look for next MAKE_FUNCTION
        j = i
@@ -627,7 +632,11 @@ class Python3Parser(PythonParser):
             self.add_unique_rule(rule, token.kind, uniq_param, customize)
 
         if "LOAD_BUILD_CLASS" in self.seen_ops:
-            if next_token == "CALL_FUNCTION" and next_token.attr == 1 and pos_args_count > 1:
+            if (
+                next_token == "CALL_FUNCTION"
+                and next_token.attr == 1
+                and pos_args_count > 1
+            ):
                 rule = "classdefdeco2 ::= LOAD_BUILD_CLASS mkfunc %s%s_%d" % (
                     ("expr " * (pos_args_count - 1)),
                     opname,
@@ -766,18 +775,24 @@ class Python3Parser(PythonParser):
 
         elif opname in ("BUILD_CONST_LIST", "BUILD_CONST_DICT", "BUILD_CONST_SET"):
             if opname == "BUILD_CONST_DICT":
-                rule = """
+                rule = (
+                    """
                     add_consts ::= ADD_VALUE*
                     const_list ::= COLLECTION_START add_consts %s
                     dict ::= const_list
                     expr ::= dict
-                """ % opname
+                """
+                    % opname
+                )
             else:
-                rule = """
+                rule = (
+                    """
                     add_consts ::= ADD_VALUE*
                     const_list ::= COLLECTION_START add_consts %s
                     expr ::= const_list
-                """ % opname
+                """
+                    % opname
+                )
             self.addRule(rule, nop_func)
 
         elif opname.startswith("BUILD_DICT_OLDER"):
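Note: to make the `% opname` substitution in this hunk concrete, here is what
the BUILD_CONST_DICT branch hands to `self.addRule(rule, nop_func)` (a sketch;
the whitespace follows the triple-quoted string above):

    opname = "BUILD_CONST_DICT"
    rule = (
        """
        add_consts ::= ADD_VALUE*
        const_list ::= COLLECTION_START add_consts %s
        dict ::= const_list
        expr ::= dict
        """
        % opname
    )
    # The %s slot becomes the concrete opcode name:
    assert "const_list ::= COLLECTION_START add_consts BUILD_CONST_DICT" in rule
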
@@ -932,7 +947,6 @@ class Python3Parser(PythonParser):
                 "CALL_FUNCTION_VAR_KW",
             )
         ) or opname.startswith("CALL_FUNCTION_KW"):
-
             if opname == "CALL_FUNCTION" and token.attr == 1:
                 rule = """
                     dict_comp ::= LOAD_DICTCOMP LOAD_STR MAKE_FUNCTION_0 expr
@@ -1108,7 +1122,8 @@ class Python3Parser(PythonParser):
         if has_get_iter_call_function1:
             rule_pat = (
                 "generator_exp ::= %sload_closure load_genexpr %%s%s expr "
-                "GET_ITER CALL_FUNCTION_1" % ("pos_arg " * pos_args_count, opname)
+                "GET_ITER CALL_FUNCTION_1"
+                % ("pos_arg " * pos_args_count, opname)
             )
             self.add_make_function_rule(rule_pat, opname, token.attr, customize)
 
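Note: the `%%s` in these rule patterns is easy to misread once the `%` operator
moves to its own line. Python concatenates the adjacent string literals first;
the interpolation then fills the `%s` slots and collapses `%%s` to a literal
`%s`, which is left as a slot for a later substitution (presumably inside
`add_make_function_rule`). A sketch with illustrative values:

    pos_args_count = 2
    opname = "MAKE_FUNCTION_0"
    rule_pat = (
        "generator_exp ::= %sload_closure load_genexpr %%s%s expr "
        "GET_ITER CALL_FUNCTION_1" % ("pos_arg " * pos_args_count, opname)
    )
    assert rule_pat == (
        "generator_exp ::= pos_arg pos_arg load_closure load_genexpr "
        "%sMAKE_FUNCTION_0 expr GET_ITER CALL_FUNCTION_1"
    )
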
@@ -1194,7 +1209,6 @@ class Python3Parser(PythonParser):
             )
             self.add_unique_rule(rule, opname, token.attr, customize)
 
-
         if self.version >= (3, 4):
             if not self.is_pypy:
                 load_op = "LOAD_STR"
@@ -1278,14 +1292,16 @@ class Python3Parser(PythonParser):
             if has_get_iter_call_function1:
                 rule_pat = (
                     "generator_exp ::= %sload_genexpr %%s%s expr "
-                    "GET_ITER CALL_FUNCTION_1" % ("pos_arg " * pos_args_count, opname)
+                    "GET_ITER CALL_FUNCTION_1"
+                    % ("pos_arg " * pos_args_count, opname)
                 )
                 self.add_make_function_rule(
                     rule_pat, opname, token.attr, customize
                 )
                 rule_pat = (
                     "generator_exp ::= %sload_closure load_genexpr %%s%s expr "
-                    "GET_ITER CALL_FUNCTION_1" % ("pos_arg " * pos_args_count, opname)
+                    "GET_ITER CALL_FUNCTION_1"
+                    % ("pos_arg " * pos_args_count, opname)
                 )
                 self.add_make_function_rule(
                     rule_pat, opname, token.attr, customize
@@ -1337,7 +1353,8 @@ class Python3Parser(PythonParser):
         if has_get_iter_call_function1:
             rule_pat = (
                 "generator_exp ::= %sload_genexpr %%s%s expr "
-                "GET_ITER CALL_FUNCTION_1" % ("pos_arg " * pos_args_count, opname)
+                "GET_ITER CALL_FUNCTION_1"
+                % ("pos_arg " * pos_args_count, opname)
             )
             self.add_make_function_rule(rule_pat, opname, token.attr, customize)
 
@@ -1349,7 +1366,8 @@ class Python3Parser(PythonParser):
             # Todo: For Pypy we need to modify this slightly
             rule_pat = (
                 "listcomp ::= %sLOAD_LISTCOMP %%s%s expr "
-                "GET_ITER CALL_FUNCTION_1" % ("expr " * pos_args_count, opname)
+                "GET_ITER CALL_FUNCTION_1"
+                % ("expr " * pos_args_count, opname)
             )
             self.add_make_function_rule(
                 rule_pat, opname, token.attr, customize
@@ -1580,7 +1598,7 @@ class Python3Parser(PythonParser):
         }
 
         if self.version == (3, 6):
             self.reduce_check_table["and"] = and_invalid
             self.check_reduce["and"] = "AST"
 
         self.check_reduce["annotate_tuple"] = "noAST"
@@ -1610,7 +1628,7 @@ class Python3Parser(PythonParser):
     def reduce_is_invalid(self, rule, ast, tokens, first, last):
         lhs = rule[0]
         n = len(tokens)
-        last = min(last, n-1)
+        last = min(last, n - 1)
         fn = self.reduce_check_table.get(lhs, None)
         if fn:
             if fn(self, lhs, n, rule, ast, tokens, first, last):
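Note: `reduce_check_table` maps a grammar left-hand side to a validation
callback, and `reduce_is_invalid` dispatches to it with the fixed signature
shown in the hunk above. A minimal sketch of that contract (the body here is
illustrative; the real checkers live in uncompyle6.parsers.reducecheck):

    def while1stmt_check(self, lhs, n, rule, ast, tokens, first, last):
        # Return True when reducing tokens[first..last] to `lhs` via `rule`
        # would be an invalid parse; the grammar engine then rejects it.
        return False

    reduce_check_table = {"while1stmt": while1stmt_check}
    fn = reduce_check_table.get("while1stmt", None)
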
@@ -1636,13 +1654,18 @@ class Python3Parser(PythonParser):
             condition_jump2 = tokens[min(last - 1, len(tokens) - 1)]
             # If there are two *distinct* condition jumps, they should not jump to the
             # same place. Otherwise we have some sort of "and"/"or".
-            if condition_jump2.kind.startswith("POP_JUMP_IF") and condition_jump != condition_jump2:
+            if (
+                condition_jump2.kind.startswith("POP_JUMP_IF")
+                and condition_jump != condition_jump2
+            ):
                 return condition_jump.attr == condition_jump2.attr
 
-            if tokens[last] == "COME_FROM" and tokens[last].off2int() != condition_jump.attr:
+            if (
+                tokens[last] == "COME_FROM"
+                and tokens[last].off2int() != condition_jump.attr
+            ):
                 return False
 
 
             # if condition_jump.attr < condition_jump2.off2int():
             #     print("XXX", first, last)
             #     for t in range(first, last): print(tokens[t])
@@ -1664,7 +1687,6 @@ class Python3Parser(PythonParser):
                 < tokens[last].off2int()
             )
         elif lhs == "while1stmt":
-
             if while1stmt(self, lhs, n, rule, ast, tokens, first, last):
                 return True
 
@@ -1686,7 +1708,6 @@ class Python3Parser(PythonParser):
                 return True
             return False
         elif lhs == "while1elsestmt":
-
             n = len(tokens)
             if last == n:
                 # Adjust for fuzziness in parsing