Go over "yield" and other precedence

rocky
2020-04-09 20:27:07 -04:00
parent 42d3c4db61
commit 5c6365d8a1
4 changed files with 421 additions and 364 deletions

View File

@@ -0,0 +1,87 @@
# From test_grammar.py
# RUNNABLE!
def check_syntax_error(statement):
    try:
        compile(statement, '<test string>', 'exec')
    except SyntaxError:
        return
    assert False
def test_yield():
    # Requires parentheses as call argument
    def g():
        f((yield 1), 1)
    def g():
        f((yield from ()))
    def g():
        f((yield from ()), 1)
    def g():
        f((yield 1))
    # Allowed as standalone statement
    def g():
        yield 1
    def g():
        yield from ()
    # Allowed as RHS of assignment
    def g():
        x = yield 1
    def g():
        x = yield from ()
    # Ordinary yield accepts implicit tuples
    def g():
        yield 1, 1
    def g():
        x = yield 1, 1
    # 'yield from' does not
    check_syntax_error("def g(): yield from (), 1")
    check_syntax_error("def g(): x = yield from (), 1")
    # Requires parentheses as subexpression
    def g():
        1, (yield 1)
    def g():
        1, (yield from ())
    check_syntax_error("def g(): 1, yield 1")
    check_syntax_error("def g(): 1, yield from ()")
    # Requires parentheses as call argument
    def g():
        f((yield 1))
    def g():
        f((yield 1), 1)
    def g():
        f((yield from ()))
    def g():
        f((yield from ()), 1)
    check_syntax_error("def g(): f(yield 1)")
    check_syntax_error("def g(): f(yield 1, 1)")
    check_syntax_error("def g(): f(yield from ())")
    check_syntax_error("def g(): f(yield from (), 1)")
    # Not allowed at top level
    check_syntax_error("yield")
    check_syntax_error("yield from")
    # Not allowed at class scope
    check_syntax_error("class foo:yield 1")
    check_syntax_error("class foo:yield from ()")
    # Check annotation refleak on SyntaxError
    check_syntax_error("def g(a:(yield)): pass")
test_yield()
# From test_types.py
# Bug was needing parens around (yield 2)
def gen_func():
    yield 1
    return (yield 2)
gen = gen_func()
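For reference (not part of the commit), stepping the gen generator shows why the parentheses around (yield 2) matter: the value sent into that second yield becomes the generator's return value (Python 3.3+ semantics assumed).

print(next(gen))        # -> 1   (first yield)
print(next(gen))        # -> 2   (the parenthesized yield expression)
try:
    gen.send("done")    # resumes "return (yield 2)"
except StopIteration as stop:
    print(stop.value)   # -> done  (the sent value becomes the generator's return value)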

View File

@@ -51,105 +51,121 @@ else:
# call((.. op ..)).
PRECEDENCE = {
'yield': 102,
'yield_from': 102,
"named_expr": 40, # :=
"yield": 38, # Needs to be below named_expr
"yield_from": 38,
'_mklambda': 30,
"mklambda": 30,
'if_exp': 28, # If_Exp expression
'if_exp_lamdba': 28, # Lambda expression
'if_exp_not_lamdba': 28, # Lambda expression
'if_exp_not': 28,
'if_exp_true': 28,
'if_exp_ret': 28,
"if_exp": 28, # IfExp ( a if x else b)
"if_exp_lambda": 28, # IfExp involving a lambda expression
"if_exp_not_lambda": 28, # negated IfExp involving a lambda expression
"if_exp_not": 28,
"if_exp_true": 28, # (a if True else b)
"if_exp_ret": 28,
'or': 26, # Boolean OR
'ret_or': 26,
"or": 26, # Boolean OR
"ret_or": 26,
'and': 24, # Boolean AND
'compare': 20, # in, not in, is, is not, <, <=, >, >=, !=, ==
'ret_and': 24,
'unary_not': 22, # Boolean NOT
"and": 24, # Boolean AND
"ret_and": 24,
"not": 22, # Boolean NOT
"unary_not": 22, # Boolean NOT
"compare": 20, # in, not in, is, is not, <, <=, >, >=, !=, ==
'BINARY_AND': 14, # Bitwise AND
'BINARY_OR': 18, # Bitwise OR
'BINARY_XOR': 16, # Bitwise XOR
"BINARY_AND": 14, # Bitwise AND
"BINARY_OR": 18, # Bitwise OR
"BINARY_XOR": 16, # Bitwise XOR
'BINARY_LSHIFT': 12, # Shifts <<
'BINARY_RSHIFT': 12, # Shifts >>
"BINARY_LSHIFT": 12, # Shifts <<
"BINARY_RSHIFT": 12, # Shifts >>
'BINARY_ADD': 10, # +
'BINARY_SUBTRACT': 10, # -
"BINARY_ADD": 10, # +
"BINARY_SUBTRACT": 10, # -
'BINARY_DIVIDE': 8, # /
'BINARY_FLOOR_DIVIDE': 8, # //
'BINARY_MATRIX_MULTIPLY': 8, # @
'BINARY_MODULO': 8, # Remainder, %
'BINARY_MULTIPLY': 8, # *
'BINARY_TRUE_DIVIDE': 8, # Division /
"BINARY_DIVIDE": 8, # /
"BINARY_FLOOR_DIVIDE": 8, # //
"BINARY_MATRIX_MULTIPLY": 8, # @
"BINARY_MODULO": 8, # Remainder, %
"BINARY_MULTIPLY": 8, # *
"BINARY_TRUE_DIVIDE": 8, # Division /
'unary_op': 6, # +x, -x, ~x
"unary_op": 6, # +x, -x, ~x
'BINARY_POWER': 4, # Exponentiation, **
"BINARY_POWER": 4, # Exponentiation, **
'await_expr': 3, # await x
"await_expr": 3, # await x
'attribute': 2, # x.attribute
'buildslice2': 2, # x[index]
'buildslice3': 2, # x[index:index]
'call': 2, # x(arguments...)
'delete_subscript': 2,
'slice0': 2,
'slice1': 2,
'slice2': 2,
'slice3': 2,
'store_subscript': 2,
'subscript': 2,
'subscript2': 2,
"attribute": 2, # x.attribute
"buildslice2": 2, # x[index]
"buildslice3": 2, # x[index:index]
"call": 2, # x(arguments...)
"delete_subscript": 2,
"slice0": 2,
"slice1": 2,
"slice2": 2,
"slice3": 2,
"store_subscript": 2,
"subscript": 2,
"subscript2": 2,
'dict': 0, # {expressions...}
'dict_comp': 0,
'generator_exp': 0, # (expressions...)
'list': 0, # [expressions...]
'list_comp': 0,
'set_comp': 0,
'set_comp_expr': 0,
'unary_convert': 0,
"dict": 0, # {expressions...}
"dict_comp": 0,
"generator_exp": 0, # (expressions...)
"list": 0, # [expressions...]
"list_comp": 0,
"set_comp": 0,
"set_comp_expr": 0,
"unary_convert": 0,
}
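In rough terms (a simplified sketch with made-up names, not this project's API), a table like PRECEDENCE drives parenthesization: a child expression gets wrapped when its own precedence exceeds what the surrounding context allows, which is why "yield" at 38 picks up parentheses when rendered under a cap of 37.

PREC = {"yield": 38, "or": 26, "call": 2}   # small excerpt of the table above

def maybe_parens(child_text, child_kind, surrounding_prec):
    # Wrap the child when it binds more loosely than its context permits.
    if PREC[child_kind] > surrounding_prec:
        return "(" + child_text + ")"
    return child_text

print(maybe_parens("yield 1", "yield", PREC["yield"] - 1))   # -> (yield 1)
print(maybe_parens("a or b", "or", PREC["yield"] - 1))       # -> a or b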
LINE_LENGTH = 80
# Some parse trees created below are used for comparing code
# fragments (like 'return None' at the end of functions).
# fragments (like "return None" at the end of functions).
RETURN_LOCALS = SyntaxTree('return',
[ SyntaxTree('ret_expr', [SyntaxTree('expr', [ Token('LOAD_LOCALS') ])]),
Token('RETURN_VALUE')])
RETURN_LOCALS = SyntaxTree(
"return",
[
SyntaxTree("ret_expr", [SyntaxTree("expr", [Token("LOAD_LOCALS")])]),
Token("RETURN_VALUE"),
],
)
NONE = SyntaxTree('expr', [ NoneToken ] )
NONE = SyntaxTree("expr", [NoneToken])
RETURN_NONE = SyntaxTree('stmt',
[ SyntaxTree('return',
[ NONE, Token('RETURN_VALUE')]) ])
RETURN_NONE = SyntaxTree("stmt", [SyntaxTree("return", [NONE, Token("RETURN_VALUE")])])
PASS = SyntaxTree('stmts',
[ SyntaxTree('sstmt',
[ SyntaxTree('stmt',
[ SyntaxTree('pass', [])])])])
PASS = SyntaxTree(
"stmts", [SyntaxTree("sstmt", [SyntaxTree("stmt", [SyntaxTree("pass", [])])])]
)
ASSIGN_DOC_STRING = lambda doc_string, doc_load: \
SyntaxTree("stmt",
[ SyntaxTree("assign",
[ SyntaxTree("expr", [ Token(doc_load, pattr=doc_string, attr=doc_string) ]),
SyntaxTree("store", [ Token("STORE_NAME", pattr="__doc__")])
])])
ASSIGN_DOC_STRING = lambda doc_string, doc_load: SyntaxTree(
"stmt",
[
SyntaxTree(
"assign",
[
SyntaxTree(
"expr", [Token(doc_load, pattr=doc_string, attr=doc_string)]
),
SyntaxTree("store", [Token("STORE_NAME", pattr="__doc__")]),
],
)
],
)
NAME_MODULE = SyntaxTree('assign',
[ SyntaxTree('expr',
[Token('LOAD_NAME', pattr='__name__', offset=0, has_arg=True)]),
SyntaxTree('store',
[ Token('STORE_NAME', pattr='__module__', offset=3, has_arg=True)])
])
NAME_MODULE = SyntaxTree(
"assign",
[
SyntaxTree(
"expr", [Token("LOAD_NAME", pattr="__name__", offset=0, has_arg=True)]
),
SyntaxTree(
"store", [Token("STORE_NAME", pattr="__module__", offset=3, has_arg=True)]
),
],
)
# God intended \t, but Python has decided to use 4 spaces.
# If you want real tabs, use Go.
@@ -158,312 +174,255 @@ TAB = " " * 4
INDENT_PER_LEVEL = " " # additional indent per pretty-print level
TABLE_R = {
'STORE_ATTR': ( '%c.%[1]{pattr}', 0),
'DELETE_ATTR': ( '%|del %c.%[-1]{pattr}\n', 0 ),
"STORE_ATTR": ("%c.%[1]{pattr}", 0),
"DELETE_ATTR": ("%|del %c.%[-1]{pattr}\n", 0),
}
TABLE_R0 = {
# 'BUILD_LIST': ( '[%C]', (0,-1,', ') ),
# 'BUILD_TUPLE': ( '(%C)', (0,-1,', ') ),
# 'CALL_FUNCTION': ( '%c(%P)', 0, (1,-1,', ') ),
# "BUILD_LIST": ( "[%C]", (0,-1,", ") ),
# "BUILD_TUPLE": ( "(%C)", (0,-1,", ") ),
# "CALL_FUNCTION": ( "%c(%P)", 0, (1,-1,", ") ),
}
TABLE_DIRECT = {
'BINARY_ADD': ( '+' ,),
'BINARY_SUBTRACT': ( '-' ,),
'BINARY_MULTIPLY': ( '*' ,),
'BINARY_DIVIDE': ( '/' ,),
'BINARY_MATRIX_MULTIPLY': ( '@' ,),
'BINARY_TRUE_DIVIDE': ( '/' ,), # Not in <= 2.1
'BINARY_FLOOR_DIVIDE': ( '//' ,),
'BINARY_MODULO': ( '%%',),
'BINARY_POWER': ( '**',),
'BINARY_LSHIFT': ( '<<',),
'BINARY_RSHIFT': ( '>>',),
'BINARY_AND': ( '&' ,),
'BINARY_OR': ( '|' ,),
'BINARY_XOR': ( '^' ,),
'INPLACE_ADD': ( '+=' ,),
'INPLACE_SUBTRACT': ( '-=' ,),
'INPLACE_MULTIPLY': ( '*=' ,),
'INPLACE_MATRIX_MULTIPLY': ( '@=' ,),
'INPLACE_DIVIDE': ( '/=' ,),
'INPLACE_TRUE_DIVIDE': ( '/=' ,), # Not in <= 2.1; 2.6 generates INPLACE_DIVIDE only?
'INPLACE_FLOOR_DIVIDE': ( '//=' ,),
'INPLACE_MODULO': ( '%%=',),
'INPLACE_POWER': ( '**=',),
'INPLACE_LSHIFT': ( '<<=',),
'INPLACE_RSHIFT': ( '>>=',),
'INPLACE_AND': ( '&=' ,),
'INPLACE_OR': ( '|=' ,),
'INPLACE_XOR': ( '^=' ,),
"BINARY_ADD": ("+",),
"BINARY_SUBTRACT": ("-",),
"BINARY_MULTIPLY": ("*",),
"BINARY_DIVIDE": ("/",),
"BINARY_MATRIX_MULTIPLY": ("@",),
"BINARY_TRUE_DIVIDE": ("/",), # Not in <= 2.1
"BINARY_FLOOR_DIVIDE": ("//",),
"BINARY_MODULO": ("%%",),
"BINARY_POWER": ("**",),
"BINARY_LSHIFT": ("<<",),
"BINARY_RSHIFT": (">>",),
"BINARY_AND": ("&",),
"BINARY_OR": ("|",),
"BINARY_XOR": ("^",),
"INPLACE_ADD": ("+=",),
"INPLACE_SUBTRACT": ("-=",),
"INPLACE_MULTIPLY": ("*=",),
"INPLACE_MATRIX_MULTIPLY": ("@=",),
"INPLACE_DIVIDE": ("/=",),
"INPLACE_TRUE_DIVIDE": ("/=",), # Not in <= 2.1; 2.6 generates INPLACE_DIVIDE only?
"INPLACE_FLOOR_DIVIDE": ("//=",),
"INPLACE_MODULO": ("%%=",),
"INPLACE_POWER": ("**=",),
"INPLACE_LSHIFT": ("<<=",),
"INPLACE_RSHIFT": (">>=",),
"INPLACE_AND": ("&=",),
"INPLACE_OR": ("|=",),
"INPLACE_XOR": ("^=",),
# bin_op (formerly "binary_expr") is the Python AST BinOp
'bin_op': ( '%c %c %c', 0,
(-1, 'binary_operator'),
( 1, 'expr' ) ),
'UNARY_POSITIVE': ( '+',),
'UNARY_NEGATIVE': ( '-',),
'UNARY_INVERT': ( '~' ,),
"bin_op": ("%c %c %c", 0, (-1, "binary_operator"), (1, "expr")),
"UNARY_POSITIVE": ("+",),
"UNARY_NEGATIVE": ("-",),
"UNARY_INVERT": ("~"),
# unary_op (formerly "unary_expr") is the Python AST UnaryOp
'unary_op': ( '%c%c',
(1, 'unary_operator'),
(0, 'expr') ),
'unary_not': ( 'not %c',
(0, 'expr' ) ),
'unary_convert': ( '`%c`',
(0, 'expr' ), ),
'get_iter': ( 'iter(%c)',
(0, 'expr'), ),
'slice0': ( '%c[:]',
(0, 'expr'), ),
'slice1': ( '%c[%p:]',
(0, 'expr'),
(1, 100) ),
'slice2': ( '%c[:%p]',
(0, 'expr'),
(1, 100) ),
'slice3': ( '%c[%p:%p]',
(0, 'expr'),
(1, 100), (2, 100) ),
'IMPORT_FROM': ( '%{pattr}', ),
'IMPORT_NAME_ATTR': ( '%{pattr}', ),
'attribute': ( '%c.%[1]{pattr}',
(0, 'expr')),
'LOAD_STR': ( '%{pattr}', ),
'LOAD_FAST': ( '%{pattr}', ),
'LOAD_NAME': ( '%{pattr}', ),
'LOAD_CLASSNAME': ( '%{pattr}', ),
'LOAD_GLOBAL': ( '%{pattr}', ),
'LOAD_DEREF': ( '%{pattr}', ),
'LOAD_LOCALS': ( 'locals()', ),
'LOAD_ASSERT': ( '%{pattr}', ),
'DELETE_FAST': ( '%|del %{pattr}\n', ),
'DELETE_NAME': ( '%|del %{pattr}\n', ),
'DELETE_GLOBAL': ( '%|del %{pattr}\n', ),
'delete_subscript': ( '%|del %p[%c]\n',
(0, 'expr', PRECEDENCE['subscript']), (1, 'expr') ),
'subscript': ( '%p[%c]',
(0, 'expr', PRECEDENCE['subscript']),
(1, 'expr') ),
'subscript2': ( '%p[%c]',
(0, 'expr', PRECEDENCE['subscript']),
(1, 'expr') ),
'store_subscript': ( '%p[%c]',
(0, 'expr', PRECEDENCE['subscript']),
(1, 'expr') ),
'STORE_FAST': ( '%{pattr}', ),
'STORE_NAME': ( '%{pattr}', ),
'STORE_GLOBAL': ( '%{pattr}', ),
'STORE_DEREF': ( '%{pattr}', ),
'unpack': ( '%C%,', (1, maxint, ', ') ),
"unary_op": ("%c%c", (1, "unary_operator"), (0, "expr")),
"unary_not": ("not %c", (0, "expr")),
"unary_convert": ("`%c`", (0, "expr"),),
"get_iter": ("iter(%c)", (0, "expr"),),
"slice0": ("%c[:]", (0, "expr"),),
"slice1": ("%c[%p:]", (0, "expr"), (1, 100)),
"slice2": ("%c[:%p]", (0, "expr"), (1, 100)),
"slice3": ("%c[%p:%p]", (0, "expr"), (1, 100), (2, 100)),
"IMPORT_FROM": ("%{pattr}",),
"IMPORT_NAME_ATTR": ("%{pattr}",),
"attribute": ("%c.%[1]{pattr}", (0, "expr")),
"LOAD_STR": ("%{pattr}",),
"LOAD_FAST": ("%{pattr}",),
"LOAD_NAME": ("%{pattr}",),
"LOAD_CLASSNAME": ("%{pattr}",),
"LOAD_GLOBAL": ("%{pattr}",),
"LOAD_DEREF": ("%{pattr}",),
"LOAD_LOCALS": ("locals()",),
"LOAD_ASSERT": ("%{pattr}",),
"DELETE_FAST": ("%|del %{pattr}\n",),
"DELETE_NAME": ("%|del %{pattr}\n",),
"DELETE_GLOBAL": ("%|del %{pattr}\n",),
"delete_subscript": (
"%|del %p[%c]\n",
(0, "expr", PRECEDENCE["subscript"]),
(1, "expr"),
),
"subscript": ("%p[%c]", (0, "expr", PRECEDENCE["subscript"]), (1, "expr")),
"subscript2": ("%p[%c]", (0, "expr", PRECEDENCE["subscript"]), (1, "expr")),
"store_subscript": ("%p[%c]", (0, "expr", PRECEDENCE["subscript"]), (1, "expr")),
"STORE_FAST": ("%{pattr}",),
"STORE_NAME": ("%{pattr}",),
"STORE_GLOBAL": ("%{pattr}",),
"STORE_DEREF": ("%{pattr}",),
"unpack": ("%C%,", (1, maxint, ", ")),
# This nonterminal we create on the fly in semantic routines
'unpack_w_parens': ( '(%C%,)', (1, maxint, ', ') ),
"unpack_w_parens": ("(%C%,)", (1, maxint, ", ")),
# This nonterminal we create on the fly in semantic routines
'attribute_w_parens': ( '(%c).%[1]{pattr}',
(0, 'expr')),
"attribute_w_parens": ("(%c).%[1]{pattr}", (0, "expr")),
# This nonterminal we create on the fly in semantic routines
'store_w_parens': ( '(%c).%[1]{pattr}',
(0, 'expr')),
'unpack_list': ( '[%C]',
(1, maxint, ', ') ),
'build_tuple2': ( '%P',
(0, -1, ', ', 100) ),
'list_iter': ( '%c', 0 ),
'list_for': ( ' for %c in %c%c', 2, 0, 3 ),
'list_if': ( ' if %p%c',
(0, 'expr', 27), 2 ),
'list_if_not': ( ' if not %p%c',
(0, 'expr', PRECEDENCE['unary_not']),
2 ),
'lc_body': ( '', ), # ignore when recursing
'comp_iter': ( '%c', 0 ),
'comp_if': ( ' if %c%c', 0, 2 ),
'comp_if_not': ( ' if not %p%c',
(0, 'expr', PRECEDENCE['unary_not']), 2 ),
'comp_body': ( '', ), # ignore when recursing
'set_comp_body': ( '%c', 0 ),
'gen_comp_body': ( '%c', 0 ),
'dict_comp_body': ( '%c:%c', 1, 0 ),
'assign': ( '%|%c = %p\n', -1, (0, 200) ),
"store_w_parens": ("(%c).%[1]{pattr}", (0, "expr")),
"unpack_list": ("[%C]", (1, maxint, ", ")),
"build_tuple2": ("%P", (0, -1, ", ", 100)),
"list_iter": ("%c", 0),
"list_for": (" for %c in %c%c", 2, 0, 3),
"list_if": (" if %p%c", (0, "expr", 27), 2),
"list_if_not": (" if not %p%c", (0, "expr", PRECEDENCE["unary_not"]), 2),
"lc_body": ("",), # ignore when recursing
"comp_iter": ("%c", 0),
"comp_if": (" if %c%c", 0, 2),
"comp_if_not": (" if not %p%c", (0, "expr", PRECEDENCE["unary_not"]), 2),
"comp_body": ("",), # ignore when recusing
"set_comp_body": ("%c", 0),
"gen_comp_body": ("%c", 0),
"dict_comp_body": ("%c:%c", 1, 0),
"assign": ("%|%c = %p\n", -1, (0, 200)),
# The 2nd parameter should have a = suffix.
# There is a rule with a 4th parameter "store"
# which we don't use here.
'aug_assign1': ( '%|%c %c %c\n', 0, 2, 1),
'aug_assign2': ( '%|%c.%[2]{pattr} %c %c\n', 0, -3, -4 ),
'designList': ( '%c = %c', 0, -1 ),
'and': ( '%c and %c', 0, 2 ),
'ret_and': ( '%c and %c', 0, 2 ),
'and2': ( '%c', 3 ),
'or': ( '%c or %c', 0, 2 ),
'ret_or': ( '%c or %c', 0, 2 ),
'if_exp': ( '%p if %c else %c',
(2, 'expr', 27), 0, 4 ),
'if_exp_lambda': ( '%p if %c else %c',
(2, 'expr', 27), (0, 'expr'), 4 ),
'if_exp_true': ( '%p if 1 else %c', (0, 'expr', 27), 2 ),
'if_exp_ret': ( '%p if %p else %p', (2, 27), (0, 27), (-1, 27) ),
'if_exp_not': ( '%p if not %p else %p',
"aug_assign1": ("%|%c %c %c\n", 0, 2, 1),
"aug_assign2": ("%|%c.%[2]{pattr} %c %c\n", 0, -3, -4),
"designList": ("%c = %c", 0, -1),
"and": ("%c and %c", 0, 2),
"ret_and": ("%c and %c", 0, 2),
"and2": ("%c", 3),
"or": ("%c or %c", 0, 2),
"ret_or": ("%c or %c", 0, 2),
"if_exp": ("%p if %c else %c", (2, "expr", 27), 0, 4),
"if_exp_lambda": ("%p if %c else %c", (2, "expr", 27), (0, "expr"), 4),
"if_exp_true": ("%p if 1 else %c", (0, "expr", 27), 2),
"if_exp_ret": ("%p if %p else %p", (2, 27), (0, 27), (-1, 27)),
"if_exp_not": (
"%p if not %p else %p",
(2, 27),
(0, "expr", PRECEDENCE['unary_not']),
(4, 27) ),
'if_exp_not_lambda':
( '%p if not %c else %c',
(2, 'expr', 27), 0, 4 ),
'compare_single': ( '%p %[-1]{pattr.replace("-", " ")} %p', (0, 19), (1, 19) ),
'compare_chained': ( '%p %p', (0, 29), (1, 30)),
'compare_chained1': ( '%[3]{pattr.replace("-", " ")} %p %p', (0, 19), (-2, 19)),
'compare_chained2': ( '%[1]{pattr.replace("-", " ")} %p', (0, 19)),
# 'classdef': (), # handled by n_classdef()
(0, "expr", PRECEDENCE["unary_not"]),
(4, 27),
),
"if_exp_not_lambda": ("%p if not %c else %c", (2, "expr", 27), 0, 4),
"compare_single": ('%p %[-1]{pattr.replace("-", " ")} %p', (0, 19), (1, 19)),
"compare_chained": ("%p %p", (0, 29), (1, 30)),
"compare_chained1": ('%[3]{pattr.replace("-", " ")} %p %p', (0, 19), (-2, 19)),
"compare_chained2": ('%[1]{pattr.replace("-", " ")} %p', (0, 19)),
# "classdef": (), # handled by n_classdef()
# A custom rule in n_function def distinguishes whether to call this or
# function_def_async
'function_def': ( '\n\n%|def %c\n', -2), # -2 to handle closures
'function_def_deco': ( '\n\n%c', 0),
'mkfuncdeco': ( '%|@%c\n%c', 0, 1),
"function_def": ("\n\n%|def %c\n", -2), # -2 to handle closures
"function_def_deco": ("\n\n%c", 0),
"mkfuncdeco": ("%|@%c\n%c", 0, 1),
# A custom rule in n_function def distinguishes whether to call this or
# function_def_async
'mkfuncdeco0': ( '%|def %c\n', 0),
'classdefdeco': ( '\n\n%c', 0),
'classdefdeco1': ( '%|@%c\n%c', 0, 1),
'kwarg': ( '%[0]{pattr}=%c', 1), # Change when Python 2 does LOAD_STR
'kwargs': ( '%D', (0, maxint, ', ') ),
'kwargs1': ( '%D', (0, maxint, ', ') ),
'assert_expr_or': ( '%c or %c', 0, 2 ),
'assert_expr_and': ( '%c and %c', 0, 2 ),
'print_items_stmt': ( '%|print %c%c,\n', 0, 2 ), # Python 2 only
'print_items_nl_stmt': ( '%|print %c%c\n', 0, 2 ),
'print_item': ( ', %c', 0),
'print_nl': ( '%|print\n', ),
'print_to': ( '%|print >> %c, %c,\n', 0, 1 ),
'print_to_nl': ( '%|print >> %c, %c\n', 0, 1 ),
'print_nl_to': ( '%|print >> %c\n', 0 ),
'print_to_items': ( '%C', (0, 2, ', ') ),
"mkfuncdeco0": ("%|def %c\n", 0),
"classdefdeco": ("\n\n%c", 0),
"classdefdeco1": ("%|@%c\n%c", 0, 1),
"kwarg": ("%[0]{pattr}=%c", 1), # Change when Python 2 does LOAD_STR
"kwargs": ("%D", (0, maxint, ", ")),
"kwargs1": ("%D", (0, maxint, ", ")),
"assert_expr_or": ("%c or %c", 0, 2),
"assert_expr_and": ("%c and %c", 0, 2),
"print_items_stmt": ("%|print %c%c,\n", 0, 2), # Python 2 only
"print_items_nl_stmt": ("%|print %c%c\n", 0, 2),
"print_item": (", %c", 0),
"print_nl": ("%|print\n",),
"print_to": ("%|print >> %c, %c,\n", 0, 1),
"print_to_nl": ("%|print >> %c, %c\n", 0, 1),
"print_nl_to": ("%|print >> %c\n", 0),
"print_to_items": ("%C", (0, 2, ", ")),
# This is only generated by transform
# it is a string at the beginning of a function that is *not* a docstring
# 3.7 test_fstring.py tests for this kind of crap.
# For compatibility with older Python, we'll use "%" instead of
# a format string.
"string_at_beginning": ('%|"%%s" %% %c\n', 0),
'call_stmt': ( '%|%p\n', (0, 200)),
'break': ( '%|break\n', ),
'continue': ( '%|continue\n', ),
'raise_stmt0': ( '%|raise\n', ),
'raise_stmt1': ( '%|raise %c\n', 0),
'raise_stmt3': ( '%|raise %c, %c, %c\n', 0, 1, 2),
# 'yield': ( 'yield %c', 0),
# 'return': ( '%|return %c\n', 0),
'return_if_stmt': ( 'return %c\n', 0),
"ifstmt": ( "%|if %c:\n%+%c%-",
"call_stmt": ("%|%p\n", (0, 200)),
"break": ("%|break\n",),
"continue": ("%|continue\n",),
"raise_stmt0": ("%|raise\n",),
"raise_stmt1": ("%|raise %c\n", 0),
"raise_stmt3": ("%|raise %c, %c, %c\n", 0, 1, 2),
# "yield": ( "yield %c", 0),
# "return": ( "%|return %c\n", 0),
"return_if_stmt": ("return %c\n", 0),
"ifstmt": (
"%|if %c:\n%+%c%-",
0, # "testexpr" or "testexpr_then"
1, # "_ifstmts_jump" or "return_stmts"
),
'iflaststmt': ( "%|if %c:\n%+%c%-", 0, 1 ),
'iflaststmtl': ( "%|if %c:\n%+%c%-", 0, 1 ),
'testtrue': ( "not %p",
(0, PRECEDENCE['unary_not']) ),
"iflaststmt": ("%|if %c:\n%+%c%-", 0, 1),
"iflaststmtl": ("%|if %c:\n%+%c%-", 0, 1),
"testtrue": ("not %p", (0, PRECEDENCE["unary_not"])),
# Generally the args here are 0: (some sort of) "testexpr",
# 1: (some sort of) "cstmts_opt",
# 2 or 3: "else_suite"
# But unfortunately there are irregularities. For example, 2.6 and earlier use "testexpr_then",
# and sometimes "cstmts" appears instead of "cstmts_opt".
# Down the line we might isolate these into version-specific rules.
'ifelsestmt': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
'ifelsestmtc': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
'ifelsestmtl': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
"ifelsestmt": ("%|if %c:\n%+%c%-%|else:\n%+%c%-", 0, 1, 3),
"ifelsestmtc": ("%|if %c:\n%+%c%-%|else:\n%+%c%-", 0, 1, 3),
"ifelsestmtl": ("%|if %c:\n%+%c%-%|else:\n%+%c%-", 0, 1, 3),
# These are created only via transformation
'ifelifstmt': ( '%|if %c:\n%+%c%-%c',
0, # "testexpr" or "testexpr_then"
1, 3 ),
'elifelifstmt': ( '%|elif %c:\n%+%c%-%c', 0, 1, 3 ),
'elifstmt': ( '%|elif %c:\n%+%c%-', 0, 1 ),
'elifelsestmt': ( '%|elif %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 3 ),
'ifelsestmtr': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-', 0, 1, 2 ),
'ifelsestmtr2': ( '%|if %c:\n%+%c%-%|else:\n%+%c%-\n\n', 0, 1, 3 ), # has COME_FROM
'elifelsestmtr': ( '%|elif %c:\n%+%c%-%|else:\n%+%c%-\n\n', 0, 1, 2 ),
'elifelsestmtr2': ( '%|elif %c:\n%+%c%-%|else:\n%+%c%-\n\n', 0, 1, 3 ), # has COME_FROM
'whileTruestmt': ( '%|while True:\n%+%c%-\n\n', 1 ),
'whilestmt': ( '%|while %c:\n%+%c%-\n\n', 1, 2 ),
'while1stmt': ( '%|while 1:\n%+%c%-\n\n', 1 ),
'while1elsestmt': ( '%|while 1:\n%+%c%-%|else:\n%+%c%-\n\n', 1, -2 ),
'whileelsestmt': ( '%|while %c:\n%+%c%-%|else:\n%+%c%-\n\n', 1, 2, -2 ),
'whileelsestmt2': ( '%|while %c:\n%+%c%-%|else:\n%+%c%-\n\n', 1, 2, -3 ),
'whileelselaststmt': ( '%|while %c:\n%+%c%-%|else:\n%+%c%-', 1, 2, -2 ),
"ifelifstmt": ("%|if %c:\n%+%c%-%c", 0, 1, 3), # "testexpr" or "testexpr_then"
"elifelifstmt": ("%|elif %c:\n%+%c%-%c", 0, 1, 3),
"elifstmt": ("%|elif %c:\n%+%c%-", 0, 1),
"elifelsestmt": ("%|elif %c:\n%+%c%-%|else:\n%+%c%-", 0, 1, 3),
"ifelsestmtr": ("%|if %c:\n%+%c%-%|else:\n%+%c%-", 0, 1, 2),
"ifelsestmtr2": ("%|if %c:\n%+%c%-%|else:\n%+%c%-\n\n", 0, 1, 3), # has COME_FROM
"elifelsestmtr": ("%|elif %c:\n%+%c%-%|else:\n%+%c%-\n\n", 0, 1, 2),
"elifelsestmtr2": (
"%|elif %c:\n%+%c%-%|else:\n%+%c%-\n\n",
0,
1,
3,
), # has COME_FROM
"whileTruestmt": ("%|while True:\n%+%c%-\n\n", 1),
"whilestmt": ("%|while %c:\n%+%c%-\n\n", 1, 2),
"while1stmt": ("%|while 1:\n%+%c%-\n\n", 1),
"while1elsestmt": ("%|while 1:\n%+%c%-%|else:\n%+%c%-\n\n", 1, -2),
"whileelsestmt": ("%|while %c:\n%+%c%-%|else:\n%+%c%-\n\n", 1, 2, -2),
"whileelsestmt2": ("%|while %c:\n%+%c%-%|else:\n%+%c%-\n\n", 1, 2, -3),
"whileelselaststmt": ("%|while %c:\n%+%c%-%|else:\n%+%c%-", 1, 2, -2),
# Note: Python 3.8+ changes this
'for': ( '%|for %c in %c:\n%+%c%-\n\n',
(3, 'store'),
(1, 'expr'),
(4, 'for_block') ),
'forelsestmt': (
'%|for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n',
(3, 'store'),
(1, 'expr'),
(4, 'for_block'), -2 ),
'forelselaststmt': (
'%|for %c in %c:\n%+%c%-%|else:\n%+%c%-',
(3, 'store'),
(1, 'expr'),
(4, 'for_block'), -2 ),
'forelselaststmtl': (
'%|for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n',
(3, 'store'),
(1, 'expr'),
(4, 'for_block'), -2 ),
'try_except': ( '%|try:\n%+%c%-%c\n\n', 1, 3 ),
'tryelsestmt': ( '%|try:\n%+%c%-%c%|else:\n%+%c%-\n\n', 1, 3, 4 ),
'tryelsestmtc': ( '%|try:\n%+%c%-%c%|else:\n%+%c%-', 1, 3, 4 ),
'tryelsestmtl': ( '%|try:\n%+%c%-%c%|else:\n%+%c%-', 1, 3, 4 ),
"for": ("%|for %c in %c:\n%+%c%-\n\n", (3, "store"), (1, "expr"), (4, "for_block")),
"forelsestmt": (
"%|for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n",
(3, "store"),
(1, "expr"),
(4, "for_block"),
-2,
),
"forelselaststmt": (
"%|for %c in %c:\n%+%c%-%|else:\n%+%c%-",
(3, "store"),
(1, "expr"),
(4, "for_block"),
-2,
),
"forelselaststmtl": (
"%|for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n",
(3, "store"),
(1, "expr"),
(4, "for_block"),
-2,
),
"try_except": ("%|try:\n%+%c%-%c\n\n", 1, 3),
"tryelsestmt": ("%|try:\n%+%c%-%c%|else:\n%+%c%-\n\n", 1, 3, 4),
"tryelsestmtc": ("%|try:\n%+%c%-%c%|else:\n%+%c%-", 1, 3, 4),
"tryelsestmtl": ("%|try:\n%+%c%-%c%|else:\n%+%c%-", 1, 3, 4),
# Note: this is generated not by grammar rules but in this phase.
'tf_try_except': ( '%c%-%c%+', 1, 3 ),
'tf_tryelsestmt': ( '%c%-%c%|else:\n%+%c', 1, 3, 4 ),
'tryfinallystmt': ( '%|try:\n%+%c%-%|finally:\n%+%c%-\n\n', 1, 5 ),
'except': ( '%|except:\n%+%c%-', 3 ),
'except_cond1': ( '%|except %c:\n', 1 ),
'except_cond2': ( '%|except %c as %c:\n',
(1, 'expr'), (5, 'store') ),
'except_suite': ( '%+%c%-%C', 0, (1, maxint, '') ),
"tf_try_except": ("%c%-%c%+", 1, 3),
"tf_tryelsestmt": ("%c%-%c%|else:\n%+%c", 1, 3, 4),
"tryfinallystmt": ("%|try:\n%+%c%-%|finally:\n%+%c%-\n\n", 1, 5),
"except": ("%|except:\n%+%c%-", 3),
"except_cond1": ("%|except %c:\n", 1),
"except_cond2": ("%|except %c as %c:\n", (1, "expr"), (5, "store")),
"except_suite": ("%+%c%-%C", 0, (1, maxint, "")),
# In Python 3.6+, this is more complicated in the presence of "returns"
'except_suite_finalize': ( '%+%c%-%C', 1, (3, maxint, '') ),
'pass': ( '%|pass\n', ),
'STORE_FAST': ( '%{pattr}', ),
'kv': ( '%c: %c', 3, 1 ),
'kv2': ( '%c: %c', 1, 2 ),
'import': ( '%|import %c\n', 2),
'importlist': ( '%C', (0, maxint, ', ') ),
'import_from': ( '%|from %[2]{pattr} import %c\n',
(3, 'importlist') ),
'import_from_star': ( '%|from %[2]{pattr} import *\n', ),
"except_suite_finalize": ("%+%c%-%C", 1, (3, maxint, "")),
"pass": ("%|pass\n",),
"STORE_FAST": ("%{pattr}",),
"kv": ("%c: %c", 3, 1),
"kv2": ("%c: %c", 1, 2),
"import": ("%|import %c\n", 2),
"importlist": ("%C", (0, maxint, ", ")),
"import_from": ("%|from %[2]{pattr} import %c\n", (3, "importlist")),
"import_from_star": ("%|from %[2]{pattr} import *\n",),
}
@@ -479,12 +438,16 @@ MAP = {
"exprlist": MAP_R0,
}
ASSIGN_TUPLE_PARAM = lambda param_name: \
SyntaxTree("expr", [ Token("LOAD_FAST", pattr=param_name) ])
ASSIGN_TUPLE_PARAM = lambda param_name: SyntaxTree(
"expr", [Token("LOAD_FAST", pattr=param_name)]
)
escape = re.compile(r"""
escape = re.compile(
r"""
(?P<prefix> [^%]* )
% ( \[ (?P<child> -? \d+ ) \] )?
((?P<type> [^{] ) |
( [{] (?P<expr> [^}]* ) [}] ))
""", re.VERBOSE)
""",
re.VERBOSE,
)
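As an illustration (assumed usage, reusing the pattern above verbatim), the escape regex carves a semantic-action template into literal prefix text plus %-directives; each match exposes the prefix, an optional child index, a one-character directive type, or a brace-delimited expression.

import re

escape = re.compile(r"""
    (?P<prefix> [^%]* )
    % ( \[ (?P<child> -? \d+ ) \] )?
    ((?P<type> [^{] ) |
     ( [{] (?P<expr> [^}]* ) [}] ))
    """, re.VERBOSE)

for m in escape.finditer("%|del %c.%[-1]{pattr}\n"):
    print(m.group("prefix"), m.group("child"), m.group("type"), m.group("expr"))
# Roughly:  ''      None  '|'   None     -> emit the current indentation
#           'del '  None  'c'   None     -> recurse into a child node
#           '.'     '-1'  None  'pattr'  -> take pattr of child -1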

View File

@@ -264,7 +264,11 @@ def customize_for_version37(self, version):
and opname == "CALL_FUNCTION_1"
or not re.match("\d", opname[-1])
):
self.template_engine(("%c(%c)", (0, "expr"), 1), node)
self.template_engine(
("%c(%p)",
(0, "expr"),
(1, PRECEDENCE["yield"]-1)),
node)
self.prec = p
self.prune()
else:

View File

@@ -597,7 +597,10 @@ class SourceWalker(GenericASTTraversal, object):
def n_ret_expr(self, node):
if len(node) == 1 and node[0] == "expr":
# If expr is yield we want parens.
self.prec = PRECEDENCE["yield"] - 1
self.n_expr(node[0])
p = self.prec
else:
self.n_expr(node)
@@ -1819,7 +1822,7 @@ class SourceWalker(GenericASTTraversal, object):
prettyprint a list or tuple
"""
p = self.prec
self.prec = 100
self.prec = PRECEDENCE["yield"] - 1
lastnode = node.pop()
lastnodetype = lastnode.kind
@@ -2156,7 +2159,7 @@ class SourceWalker(GenericASTTraversal, object):
elif self.version >= 3.6 and k.startswith("CALL_FUNCTION_KW"):
TABLE_R[k] = ("%c(%P)", 0, (1, -1, ", ", 100))
elif op == "CALL_FUNCTION":
TABLE_R[k] = ("%c(%P)", 0, (1, -1, ", ", 100))
TABLE_R[k] = ("%c(%P)", (0, "expr"), (1, -1, ", ", PRECEDENCE["yield"]-1))
elif op in (
"CALL_FUNCTION_VAR",
"CALL_FUNCTION_VAR_KW",