mirror of https://github.com/rocky/python-uncompyle6.git
Update test_grammar for Python 3.8
@@ -3,48 +3,52 @@ from uncompyle6 import PYTHON_VERSION, PYTHON3, IS_PYPY # , PYTHON_VERSION
 from uncompyle6.parser import get_python_parser, python_parser
 from uncompyle6.scanner import get_scanner
 
-def test_grammar():
 
+def test_grammar():
     def check_tokens(tokens, opcode_set):
         remain_tokens = set(tokens) - opcode_set
-        remain_tokens = set([re.sub(r'_\d+$','', t) for t in remain_tokens])
-        remain_tokens = set([re.sub('_CONT$','', t) for t in remain_tokens])
-        remain_tokens = set([re.sub('LOAD_CODE$','', t) for t in remain_tokens])
+        remain_tokens = set([re.sub(r"_\d+$", "", t) for t in remain_tokens])
+        remain_tokens = set([re.sub("_CONT$", "", t) for t in remain_tokens])
+        remain_tokens = set([re.sub("LOAD_CODE$", "", t) for t in remain_tokens])
         remain_tokens = set(remain_tokens) - opcode_set
-        assert remain_tokens == set([]), \
-            "Remaining tokens %s\n====\n%s" % (remain_tokens, p.dump_grammar())
+        assert remain_tokens == set([]), "Remaining tokens %s\n====\n%s" % (
+            remain_tokens,
+            p.dump_grammar(),
+        )
 
     p = get_python_parser(PYTHON_VERSION, is_pypy=IS_PYPY)
-    (lhs, rhs, tokens,
-     right_recursive, dup_rhs) = p.check_sets()
+    (lhs, rhs, tokens, right_recursive, dup_rhs) = p.check_sets()
 
     # We have custom rules that create the below
-    expect_lhs = set(['pos_arg', 'attribute'])
+    expect_lhs = set(["pos_arg", "attribute"])
     if PYTHON_VERSION < 3.8:
-        expect_lhs.add('get_iter')
+        expect_lhs.add("get_iter")
+    else:
+        expect_lhs.add("async_with_as_stmt")
+        expect_lhs.add("async_with_stmt")
 
-    unused_rhs = set(['list', 'mkfunc',
-                      'mklambda',
-                      'unpack',])
+    unused_rhs = set(["list", "mkfunc", "mklambda", "unpack"])
 
-    expect_right_recursive = set([('designList',
-                                   ('store', 'DUP_TOP', 'designList'))])
+    expect_right_recursive = set([("designList", ("store", "DUP_TOP", "designList"))])
 
     if PYTHON_VERSION < 3.7:
-        unused_rhs.add('call')
+        unused_rhs.add("call")
 
     if PYTHON_VERSION > 2.6:
-        expect_lhs.add('kvlist')
-        expect_lhs.add('kv3')
-        unused_rhs.add('dict')
+        expect_lhs.add("kvlist")
+        expect_lhs.add("kv3")
+        unused_rhs.add("dict")
 
     if PYTHON3:
-        expect_lhs.add('load_genexpr')
+        expect_lhs.add("load_genexpr")
 
-        unused_rhs = unused_rhs.union(set("""
+        unused_rhs = unused_rhs.union(
+            set(
+                """
         except_pop_except generator_exp
-        """.split()))
+        """.split()
+            )
+        )
     if PYTHON_VERSION >= 3.0:
         expect_lhs.add("annotate_arg")
         expect_lhs.add("annotate_tuple")
@@ -53,17 +57,19 @@ def test_grammar():
         unused_rhs.add("classdefdeco1")
         unused_rhs.add("tryelsestmtl")
         if PYTHON_VERSION >= 3.5:
-            expect_right_recursive.add((('l_stmts',
-                                         ('lastl_stmt', 'come_froms', 'l_stmts'))))
+            expect_right_recursive.add(
+                (("l_stmts", ("lastl_stmt", "come_froms", "l_stmts")))
+            )
             pass
         elif 3.0 < PYTHON_VERSION < 3.3:
-            expect_right_recursive.add((('l_stmts',
-                                         ('lastl_stmt', 'COME_FROM', 'l_stmts'))))
+            expect_right_recursive.add(
+                (("l_stmts", ("lastl_stmt", "COME_FROM", "l_stmts")))
+            )
             pass
         pass
         pass
     else:
-        expect_lhs.add('kwarg')
+        expect_lhs.add("kwarg")
 
     assert expect_lhs == set(lhs)
 
@@ -73,9 +79,16 @@ def test_grammar():
 
     assert expect_right_recursive == right_recursive
 
-    expect_dup_rhs = frozenset([('COME_FROM',), ('CONTINUE',), ('JUMP_ABSOLUTE',),
-                                ('LOAD_CONST',),
-                                ('JUMP_BACK',), ('JUMP_FORWARD',)])
+    expect_dup_rhs = frozenset(
+        [
+            ("COME_FROM",),
+            ("CONTINUE",),
+            ("JUMP_ABSOLUTE",),
+            ("LOAD_CONST",),
+            ("JUMP_BACK",),
+            ("JUMP_FORWARD",),
+        ]
+    )
     reduced_dup_rhs = dict((k, dup_rhs[k]) for k in dup_rhs if k not in expect_dup_rhs)
     for k in reduced_dup_rhs:
         print(k, reduced_dup_rhs[k])
@@ -92,22 +105,33 @@ def test_grammar():
         LOAD_GENEXPR LOAD_ASSERT LOAD_SETCOMP LOAD_DICTCOMP LOAD_STR LOAD_CODE
         LAMBDA_MARKER
         RETURN_END_IF RETURN_END_IF_LAMBDA RETURN_VALUE_LAMBDA RETURN_LAST
-        """.split())
+        """.split()
+    )
     if 2.6 <= PYTHON_VERSION <= 2.7:
         opcode_set = set(s.opc.opname).union(ignore_set)
         if PYTHON_VERSION == 2.6:
             opcode_set.add("THEN")
         check_tokens(tokens, opcode_set)
     elif PYTHON_VERSION == 3.4:
-        ignore_set.add('LOAD_CLASSNAME')
-        ignore_set.add('STORE_LOCALS')
+        ignore_set.add("LOAD_CLASSNAME")
+        ignore_set.add("STORE_LOCALS")
         opcode_set = set(s.opc.opname).union(ignore_set)
         check_tokens(tokens, opcode_set)
 
 
 def test_dup_rule():
     import inspect
-    python_parser(PYTHON_VERSION, inspect.currentframe().f_code,
-                  is_pypy=IS_PYPY,
-                  parser_debug={
-                      'dups': True, 'transition': False, 'reduce': False,
-                      'rules': False, 'errorstack': None, 'context': True})
+
+    python_parser(
+        PYTHON_VERSION,
+        inspect.currentframe().f_code,
+        is_pypy=IS_PYPY,
+        parser_debug={
+            "dups": True,
+            "transition": False,
+            "reduce": False,
+            "rules": False,
+            "errorstack": None,
+            "context": True,
+        },
+    )
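
For reference, a minimal standalone sketch (not part of this commit) of the grammar sanity check that test_grammar performs; it assumes an uncompyle6 version exposing the names used in the diff above (PYTHON_VERSION, IS_PYPY, get_python_parser, and the parser's check_sets method).

# Minimal sketch of the grammar coverage check exercised by test_grammar.
# Assumption: uncompyle6 of this vintage is installed and importable.
from uncompyle6 import IS_PYPY, PYTHON_VERSION
from uncompyle6.parser import get_python_parser

p = get_python_parser(PYTHON_VERSION, is_pypy=IS_PYPY)
lhs, rhs, tokens, right_recursive, dup_rhs = p.check_sets()

# test_grammar compares these sets against expectations built from its custom
# grammar rules; e.g. "async_with_stmt" is expected in lhs on Python 3.8+.
print(sorted(lhs))
print(sorted(dup_rhs))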