python-uncompyle6 (mirror of https://github.com/rocky/python-uncompyle6.git)

Commit: Start parse2 customize_grammar_rule refactor
@@ -158,6 +158,15 @@ class PythonParser(GenericASTBuilder):
         else:
             raise ParserError(None, -1)
 
+    def get_pos_kw(self, token):
+        """Return the number of positional parameters and the number of
+        keyword parameters represented by the attr field of token."""
+        # Low byte indicates number of positional parameters,
+        # high byte number of keyword parameters
+        args_pos = token.attr & 0xff
+        args_kw = (token.attr >> 8) & 0xff
+        return args_pos, args_kw
+
     def nonterminal(self, nt, args):
         n = len(args)
 
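The method added above moves the decoding of a token's attr field into the PythonParser base class so the Python 2 and Python 3 parsers can share it: the low byte of attr carries the positional-argument count and the next byte the keyword-argument count. A minimal standalone sketch of that decoding follows; the FakeToken class is a stand-in for illustration only, not uncompyle6's Token class.

    # Sketch of the byte-packed argument counts that get_pos_kw decodes.
    class FakeToken:
        def __init__(self, attr):
            self.attr = attr

    def get_pos_kw(token):
        # low byte: positional argument count; next byte: keyword argument count
        return token.attr & 0xff, (token.attr >> 8) & 0xff

    # A CALL_FUNCTION-style oparg of 0x0203 means 3 positional and 2 keyword arguments.
    assert get_pos_kw(FakeToken(0x0203)) == (3, 2)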
@@ -271,6 +271,7 @@ class Python2Parser(PythonParser):
 
             # The order of opname listed is roughly sorted below
             if opname_base in ('BUILD_LIST', 'BUILD_SET', 'BUILD_TUPLE'):
+                v = token.attr
                 thousands = (v//1024)
                 thirty32s = ((v//32) % 32)
                 if thirty32s > 0:
@@ -326,16 +327,18 @@ class Python2Parser(PythonParser):
                 continue
             elif opname_base in ('CALL_FUNCTION', 'CALL_FUNCTION_VAR',
                                  'CALL_FUNCTION_VAR_KW', 'CALL_FUNCTION_KW'):
-                args_pos = (v & 0xff)          # positional parameters
-                args_kw = (v >> 8) & 0xff      # keyword parameters
+                args_pos, args_kw = self.get_pos_kw(token)
 
                 # number of apply equiv arguments:
                 nak = ( len(opname_base)-len('CALL_FUNCTION') ) // 3
                 rule = 'call ::= expr ' + 'expr '*args_pos + 'kwarg '*args_kw \
                        + 'expr ' * nak + opname
             elif opname_base == 'CALL_METHOD':
                 # PyPy only - DRY with parse3
-                args_pos = (v & 0xff)          # positional parameters
-                args_kw = (v >> 8) & 0xff      # keyword parameters
+                args_pos, args_kw = self.get_pos_kw(token)
 
                 # number of apply equiv arguments:
                 nak = ( len(opname_base)-len('CALL_METHOD') ) // 3
                 rule = 'call ::= expr ' + 'expr '*args_pos + 'kwarg '*args_kw \
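For reference, this is roughly the grammar rule string the CALL_FUNCTION branch builds once the counts come back from get_pos_kw. Illustrative only; the customized opcode name and attr value below are assumptions, not values taken from the commit.

    opname_base = 'CALL_FUNCTION_KW'
    opname = 'CALL_FUNCTION_KW_258'   # assumed customized opcode name (258 == 0x0102)
    attr = 0x0102                     # 2 positional arguments, 1 keyword argument

    args_pos = attr & 0xff            # 2
    args_kw = (attr >> 8) & 0xff      # 1
    # one extra expr for the keyword-argument dict pushed by the _KW variant
    nak = (len(opname_base) - len('CALL_FUNCTION')) // 3

    rule = 'call ::= expr ' + 'expr ' * args_pos + 'kwarg ' * args_kw \
           + 'expr ' * nak + opname
    print(rule)  # call ::= expr expr expr kwarg expr CALL_FUNCTION_KW_258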
@@ -357,16 +360,18 @@ class Python2Parser(PythonParser):
                     """, nop_func)
                 continue
             elif opname == 'JUMP_IF_NOT_DEBUG':
-                self.add_unique_rules([
-                    'jmp_true_false ::= POP_JUMP_IF_TRUE',
-                    'jmp_true_false ::= POP_JUMP_IF_FALSE',
-                    "stmt ::= assert_pypy",
-                    "stmt ::= assert2_pypy",
-                    "assert_pypy ::= JUMP_IF_NOT_DEBUG assert_expr jmp_true_false "
-                    "LOAD_ASSERT RAISE_VARARGS_1 COME_FROM",
-                    "assert2_pypy ::= JUMP_IF_NOT_DEBUG assert_expr jmp_true_false "
-                    "LOAD_ASSERT expr CALL_FUNCTION_1 RAISE_VARARGS_1 COME_FROM",
-                    ], customize)
+                v = token.attr
+                self.addRule("""
+                    jmp_true_false ::= POP_JUMP_IF_TRUE
+                    jmp_true_false ::= POP_JUMP_IF_FALSE
+                    stmt ::= assert_pypy
+                    stmt ::= assert2_pypy
+                    assert_pypy ::= JUMP_IF_NOT_DEBUG assert_expr jmp_true_false
+                       LOAD_ASSERT RAISE_VARARGS_1 COME_FROM
+                    assert2_pypy ::= JUMP_IF_NOT_DEBUG assert_expr jmp_true_false
+                       LOAD_ASSERT expr CALL_FUNCTION_1
+                       RAISE_VARARGS_1 COME_FROM
+                    """, nop_func)
                 continue
             elif opname == 'LOAD_LISTCOMP':
                 self.add_unique_rules([
@@ -382,14 +387,16 @@ class Python2Parser(PythonParser):
             elif opname == 'LOOKUP_METHOD':
                 # A PyPy speciality - DRY with parse3
                 self.add_unique_rule("attribute ::= expr LOOKUP_METHOD",
-                                     opname, v, customize)
+                                     opname, token.attr, customize)
                 continue
             elif opname_base == 'MAKE_FUNCTION':
+                # FIXME: remove v here
                 if i > 0 and tokens[i-1] == 'LOAD_LAMBDA':
                     self.addRule('mklambda ::= %s LOAD_LAMBDA %s' %
                                  ('pos_arg '*v, opname), nop_func)
                 rule = 'mkfunc ::= %s LOAD_CONST %s' % ('expr '*v, opname)
             elif opname_base == 'MAKE_CLOSURE':
+                # FIXME: remove v here
                 # FIXME: use add_unique_rules to tidy this up.
                 if i > 0 and tokens[i-1] == 'LOAD_LAMBDA':
                     self.addRule('mklambda ::= %s load_closure LOAD_LAMBDA %s' %
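The MAKE_FUNCTION and MAKE_CLOSURE branches still read the loop variable v, which is what the FIXME notes added above are flagging; v should eventually give way to token.attr as in the other branches. As a rough illustration only, with an assumed customized opcode name, a Python 2 MAKE_FUNCTION carrying two default arguments would produce:

    opname = 'MAKE_FUNCTION_2'   # assumed customized opcode name
    v = 2                        # token.attr: number of default arguments on Python 2
    rule = 'mkfunc ::= %s LOAD_CONST %s' % ('expr ' * v, opname)
    print(rule)  # mkfunc ::= expr expr LOAD_CONST MAKE_FUNCTION_2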
@@ -445,13 +452,14 @@ class Python2Parser(PythonParser):
                     ], customize)
                 continue
             elif opname_base in ('UNPACK_TUPLE', 'UNPACK_SEQUENCE'):
-                rule = 'unpack ::= ' + opname + ' store'*v
+                rule = 'unpack ::= ' + opname + ' store' * token.attr
             elif opname_base == 'UNPACK_LIST':
-                rule = 'unpack_list ::= ' + opname + ' store'*v
+                rule = 'unpack_list ::= ' + opname + ' store' * token.attr
             else:
                 raise Exception('unknown customize token %s' % opname)
-            self.add_unique_rule(rule, opname_base, v, customize)
+            self.addRule(rule, nop_func)
             pass
 
         self.check_reduce['aug_assign1'] = 'AST'
         self.check_reduce['aug_assign2'] = 'AST'
         self.check_reduce['_stmts'] = 'AST'
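Note that the catch-all at the bottom of the loop now registers the built rule with addRule(rule, nop_func) rather than add_unique_rule(...). As an illustration only, with an assumed customized opcode name, an UNPACK_SEQUENCE unpacking three values would register:

    opname = 'UNPACK_SEQUENCE_3'   # assumed customized opcode name
    attr = 3                       # token.attr: number of values unpacked
    rule = 'unpack ::= ' + opname + ' store' * attr
    print(rule)  # unpack ::= UNPACK_SEQUENCE_3 store store store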
@@ -518,16 +518,6 @@ class Python3Parser(PythonParser):
         new_rule = rule % (('LOAD_CONST ') * (1 if self.version >= 3.3 else 0))
         self.add_unique_rule(new_rule, opname, attr, customize)
 
-    def get_pos_kw(self, token):
-        """Return the number of positional parameters and the number of
-        keyword parameters represented by the attr field of token."""
-        # Low byte indicates number of positional parameters,
-        # high byte number of keyword parameters
-        args_pos = token.attr & 0xff
-        args_kw = (token.attr >> 8) & 0xff
-        return args_pos, args_kw
-
-
     def customize_grammar_rules(self, tokens, customize):
         """The base grammar we start out for a Python version even with the
         subclassing is, well, is pretty base. And we want it that way: lean and