mirror of
https://github.com/rocky/python-uncompyle6.git
Merge branch 'master' into python-2.4
(8 changed binary files are not shown.)
@@ -9,6 +9,6 @@ list(x for x in range(10) if x % 2 if x % 3)
# expresion which evaluates True unconditionally,
# but leave dead code or junk around that we have to match on.
# Tests "conditional_true" rule
(5 if 1 else max(5, 2))
5 if 1 else 2

0 or max(5, 3) if 0 else 3
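The test above exercises bytecode where the compiler already knows the condition is true, so the generated code keeps the true branch and leaves the dead "else" branch behind; the "conditional_true" grammar rule exists so the decompiler can match that leftover shape. To see the kind of bytecode involved, a quick check with the standard dis module works (the exact instruction sequence varies by Python version; this snippet is only illustrative):

import dis

# Depending on the version, the else branch may survive only as dead code
# after a test the compiler treats as always true.
dis.dis(compile("x = 5 if 1 else 2", "<example>", "exec"))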
@@ -215,28 +215,24 @@ class Python2Parser(PythonParser):
        """

    def add_custom_rules(self, tokens, customize):
        """
        Special handling for opcodes such as those that take a variable number
        of arguments -- we add a new rule for each:
        """The base grammar we start out for a Python version even with the
        subclassing is, well, is pretty base. And we want it that way: lean and
        mean so that parsing will go faster.

            list ::= {expr}^n BUILD_LIST_n
            list ::= {expr}^n BUILD_TUPLE_n
            unpack_list ::= UNPACK_LIST {expr}^n
            unpack ::= UNPACK_TUPLE {expr}^n
            unpack ::= UNPACK_SEQEUENCE {expr}^n
        Here, we add additional rules based on specific instructions
        that are in the instruction/token stream.

            build_set ::= {expr}^n BUILD_SET_n
            build_set ::= {expr}^n BUILD_SET_UNPACK_n
        For example if we see a pretty rare JUMP_IF_NOT_DEBUG
        instruction we'll add the grammar for that.

            mkfunc ::= {expr}^n LOAD_CONST MAKE_FUNCTION_n
            mklambda ::= {expr}^n LOAD_LAMBDA MAKE_FUNCTION_n
            mkfunc ::= {expr}^n load_closure LOAD_CONST MAKE_FUNCTION_n
            expr ::= expr {expr}^n CALL_FUNCTION_n
            expr ::= expr {expr}^n CALL_FUNCTION_VAR_n POP_TOP
            expr ::= expr {expr}^n CALL_FUNCTION_VAR_KW_n POP_TOP
            expr ::= expr {expr}^n CALL_FUNCTION_KW_n POP_TOP
        More importantly, here we add grammar rules for instructions
        that may access a variable number of stack items. CALL_FUNCTION,
        BUILD_LIST and so on are like this.

        Without custom rules, there can be an super-exponential number of
        derivations. See the deparsing paper for an elaboration of
        this.

        PyPy adds custom rules here as well
        """

        if 'PyPy' in customize:
@@ -249,6 +245,17 @@ class Python2Parser(PythonParser):
                list_comp ::= expr BUILD_LIST_FROM_ARG _for store list_iter
                JUMP_BACK
                """, nop_func)

        # Refactor the FIXME below and use the list below
        # # For a rough break out on the first word. This may
        # # include instructions that don't need customization,
        # # but we'll do a finer check after the rough breakout.
        # customize_instruction_basenames = frozenset(
        #     ('BUILD', 'CALL', 'CONTINUE_LOOP', 'DELETE',
        #      'EXEC_STMT', 'JUMP', 'LOAD', 'LOOKUP',
        #      'MAKE', 'SETUP_EXCEPT', 'SETUP_FINALLY',
        #      'UNPACK'))

        for i, token in enumerate(tokens):
            opname = token.kind
            # FIXME: remove the "v" thing in the code below
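To make the variable-arity rules described in the docstring above concrete: a token such as BUILD_LIST_3 or CALL_FUNCTION_2 carries its operand in its name, so a matching right-hand side with that many expr symbols can be generated on the fly. A rough sketch of the idea follows; make_variable_arity_rule is an illustrative helper and not a function in uncompyle6:

def make_variable_arity_rule(opname):
    # The trailing count in the token name (e.g. BUILD_LIST_3) says how
    # many expr items the instruction pops, so the right-hand side gets
    # that many "expr" symbols followed by the token itself.
    base, _, count = opname.rpartition('_')
    n = int(count)
    if base in ('BUILD_LIST', 'BUILD_TUPLE'):
        return 'list ::= ' + 'expr ' * n + opname
    if base == 'CALL_FUNCTION':
        # Pre-3.6 bytecode packs positional args in the low byte and
        # keyword pairs in the high byte of the operand.
        na, nk = n & 0xff, (n >> 8) & 0xff
        return 'expr ::= expr ' + 'expr ' * na + 'kwarg ' * nk + opname
    raise ValueError('no custom rule for ' + opname)

print(make_variable_arity_rule('BUILD_LIST_3'))
# -> list ::= expr expr expr BUILD_LIST_3

In the parsers, a generated string like this would then be handed to the grammar via calls such as the add_unique_rule(...) seen in the next hunk, so each rule is only added once.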
@@ -539,74 +539,31 @@ class Python3Parser(PythonParser):
        self.add_unique_rule(new_rule, opname, attr, customize)

    def add_custom_rules(self, tokens, customize):
        """The base grammar we start out for a Python version even with the
        subclassing is, well, is pretty base. And we want it that way: lean and
        mean so that parsing will go faster.

        Here, we add additional rules based on specific instructions
        that are in the instruction/token stream.

        For example if we see a pretty rare DELETE_DEREF instruction we'll
        add the grammar for that.

        More importantly, here we add grammar rules for instructions
        that may access a variable number of stack items. CALL_FUNCTION,
        BUILD_LIST and so on are like this.

        Without custom rules, there can be an super-exponential number of
        derivations. See the deparsing paper for an elaboration of
        this.
        """
        Special handling for opcodes such as those that take a variable number
        of arguments -- we add a new rule for each:

            unpack_list ::= UNPACK_LIST_n {expr}^n
            unpack ::= UNPACK_TUPLE_n {expr}^n
            unpack ::= UNPACK_SEQEUENCE_n {expr}^n
            unpack_ex ::= UNPACK_EX_b_a {expr}^(a+b)
        # For a rough break out on the first word. This may
        # include instructions that don't need customization,
        # but we'll do a finer check after the rough breakout.
        customize_instruction_basenames = frozenset(
            ('BUILD', 'CALL', 'DELETE', 'JUMP', 'LOAD', 'LOOKUP', 'MAKE', 'UNPACK'))

            # build_class (see load_build_class)

            # Even the below say _list, in the semantic rules we
            # disambiguate tuples, and sets from lists

            list ::= {expr}^n BUILD_LIST_n
            list ::= {expr}^n BUILD_TUPLE_n
            list ::= {expr}^n BUILD_LIST_UNPACK_n
            list ::= {expr}^n BUILD_TUPLE_UNPACK_n

            # FIXME:
            list ::= {expr}^n BUILD_SET_n
            list ::= {expr}^n BUILD_SET_UNPACK_n
            should be
            build_set ::= {expr}^n BUILD_SET_n
            build_set ::= {expr}^n BUILD_SET_UNPACK_n

            load_closure ::= {LOAD_CLOSURE}^n BUILD_TUPLE_n
            # call (see custom_classfunc_rule)

            # ------------
            # Python <= 3.2 omits LOAD_CONST before MAKE_
            # Note: are the below specific instances of a more general case?
            # ------------

            # Is there something more general than this? adding pos_arg?
            # Is there something corresponding using MAKE_CLOSURE?
            dict_comp ::= LOAD_DICTCOMP [LOAD_CONST] MAKE_FUNCTION_0 expr
                          GET_ITER CALL_FUNCTION_1

            generator_exp ::= {pos_arg}^n load_genexpr [LOAD_CONST] MAKE_FUNCTION_n expr
                              GET_ITER CALL_FUNCTION_1
            generator_exp ::= {expr}^n load_closure LOAD_GENEXPR [LOAD_CONST]
                              MAKE_CLOSURE_n expr GET_ITER CALL_FUNCTION_1
            listcomp ::= {pos_arg}^n LOAD_LISTCOMP [LOAD_CONST] MAKE_CLOSURE_n expr
                         GET_ITER CALL_FUNCTION_1
            listcomp ::= {pos_arg}^n load_closure LOAD_LISTCOMP [LOAD_CONST]
                         MAKE_CLOSURE_n expr GET_ITER CALL_FUNCTION_1

            # Is there something more general than this? adding pos_arg?
            # Is there something corresponding using MAKE_CLOSURE?
            For example:
            # set_comp ::= {pos_arg}^n LOAD_SETCOMP [LOAD_CONST] MAKE_CLOSURE_n
                            GET_ITER CALL_FUNCTION_1

            set_comp ::= LOAD_SETCOMP [LOAD_CONST] MAKE_FUNCTION_0 expr
                         GET_ITER CALL_FUNCTION_1
            set_comp ::= {pos_arg}^n load_closure LOAD_SETCOMP [LOAD_CONST]
                         MAKE_CLOSURE_n expr GET_ITER CALL_FUNCTION_1

            mkfunc ::= {pos_arg}^n load_closure [LOAD_CONST] MAKE_FUNCTION_n
            mkfunc ::= {pos_arg}^n load_closure [LOAD_CONST] MAKE_CLOSURE_n
            mkfunc ::= {pos_arg}^n [LOAD_CONST] MAKE_FUNCTION_n
            mklambda ::= {pos_arg}^n LOAD_LAMBDA [LOAD_CONST] MAKE_FUNCTION_n

        For PYPY:
            attribute ::= expr LOOKUP_METHOD
            call ::= expr CALL_METHOD
        """
        is_pypy = False
        seen_LOAD_BUILD_CLASS = False
        seen_GET_AWAITABLE_YIELD_FROM = False
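One of the rules listed above, unpack_ex ::= UNPACK_EX_b_a {expr}^(a+b), folds two counts into a single instruction: CPython's UNPACK_EX operand stores the number of targets before the starred name in its low byte and the number after it in its high byte. A sketch of turning that operand into a rule string follows; the exact token spelling the scanner uses may differ, so treat UNPACK_EX_1_1 here as an assumption:

def unpack_ex_rule(oparg):
    # Low byte: targets before the starred name; high byte: targets after it.
    before = oparg & 0xff
    after = (oparg >> 8) & 0xff
    token = 'UNPACK_EX_%d_%d' % (before, after)   # assumed token spelling
    return ('unpack_ex ::= %s %s' % (token, 'expr ' * (before + after))).rstrip()

# a, *rest, b = seq  compiles to UNPACK_EX with operand 1 + (1 << 8)
print(unpack_ex_rule(1 + (1 << 8)))
# -> unpack_ex ::= UNPACK_EX_1_1 expr expr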
@@ -644,8 +601,13 @@ class Python3Parser(PythonParser):

        for i, token in enumerate(tokens):
            opname = token.kind
            opname_base = opname[:opname.rfind('_')]

            # Do a quick breakout before testing potentially
            # each of the dozen or so instruction in if elif.
            if opname[:opname.find('_')] not in customize_instruction_basenames:
                continue

            opname_base = opname[:opname.rfind('_')]
            # The order of opname listed is roughly sorted below
            if opname_base == 'BUILD_CONST_KEY_MAP':
                # This is in 3.6+
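The loop above saves work by filtering on the first word of the token name before the long if/elif chain runs, and then dispatches on everything up to the last underscore. The difference between find('_') and rfind('_') is easy to miss, so here is a small standalone illustration of the same filter (the token names are just examples):

customize_instruction_basenames = frozenset(
    ('BUILD', 'CALL', 'DELETE', 'JUMP', 'LOAD', 'LOOKUP', 'MAKE', 'UNPACK'))

for opname in ('BUILD_LIST_3', 'BINARY_ADD', 'CALL_FUNCTION_2'):
    first_word = opname[:opname.find('_')]      # 'BUILD', 'BINARY', 'CALL'
    if first_word not in customize_instruction_basenames:
        continue                                # BINARY_ADD is skipped here
    opname_base = opname[:opname.rfind('_')]    # 'BUILD_LIST', 'CALL_FUNCTION'
    print(opname, '->', opname_base)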
@@ -167,7 +167,7 @@ TABLE_DIRECT = {
    'list_for': ( ' for %c in %c%c', 2, 0, 3 ),
    'list_if': ( ' if %c%c', 0, 2 ),
    'list_if_not': ( ' if not %p%c', (0, 22), 2 ),
    'lc_body': ( '', ), # ignore when recusing
    'lc_body': ( '', ), # ignore when recursing

    'comp_iter': ( '%c', 0 ),
    'comp_if': ( ' if %c%c', 0, 2 ),
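In these TABLE_DIRECT entries the format string is expanded by the semantic walker: each %c consumes one of the trailing numbers, which names the child node to recurse into, and %p additionally carries a precedence (hence the (0, 22) pair). The toy interpreter below handles only %c, over tuple stand-ins for parse-tree nodes; it is a simplification for illustration, not the project's engine:

TOY_TABLE = {
    'list_for': (' for %c in %c%c', 2, 0, 3),
    'list_if':  (' if %c%c', 0, 2),
}

def render(node):
    # A node is (kind, children); plain strings are leaves that already
    # are source text.
    if isinstance(node, str):
        return node
    kind, children = node
    entry = TOY_TABLE[kind]
    fmt, arg_iter = entry[0], iter(entry[1:])
    out, i = [], 0
    while i < len(fmt):
        if fmt[i:i + 2] == '%c':            # recurse into the indexed child
            out.append(render(children[next(arg_iter)]))
            i += 2
        else:
            out.append(fmt[i])
            i += 1
    return ''.join(out)

# For a list_for node, child 0 is the iterable, child 2 the store target,
# and child 3 the rest of the comprehension (here a trailing list_if).
tail = ('list_if', ['x', None, ''])
print(render(('list_for', ['seq', None, 'x', tail])))   # ->  for x in seq if x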
@@ -1083,7 +1083,7 @@ class SourceWalker(GenericASTTraversal, object):
        assert n == 'list_iter'

        # Find the list comprehension body. It is the inner-most
        # node.
        # node that is not list_.. .
        while n == 'list_iter':
            n = n[0] # iterate one nesting deeper
            if n == 'list_for': n = n[3]
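The walk above keeps stepping into child 0 of each list_iter, and through child 3 of a list_for, until it reaches a node that is no longer a list_* wrapper; that innermost node is the comprehension body. A toy version of the same descent over tuple stand-ins (uncompyle6's real nodes compare equal to their kind string, which is why the code above can write n == 'list_iter' directly):

# Schematic nesting for  [x * 2 for x in seq]:
#   list_iter -> list_for -> list_iter -> lc_body
lc_body  = ('lc_body', ['x * 2'])
inner    = ('list_iter', [lc_body])
list_for = ('list_for', ['seq', None, 'x', inner])
n        = ('list_iter', [list_for])

while n[0] == 'list_iter':
    n = n[1][0]                 # iterate one nesting deeper
    if n[0] == 'list_for':
        n = n[1][3]             # a for-clause: continue in its list_iter

print(n[0])   # -> lc_body, the innermost comprehension body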
@@ -1210,7 +1210,8 @@ class SourceWalker(GenericASTTraversal, object):
        n = ast[iter_index]
        assert n == 'comp_iter', n

        # find innermost node
        # Find the comprehension body. It is the inner-most
        # node that is not list_.. .
        while n == 'comp_iter': # list_iter
            n = n[0] # recurse one step
            if n == 'comp_for':
@@ -1293,7 +1294,8 @@ class SourceWalker(GenericASTTraversal, object):

        # FIXME: I'm not totally sure this is right.

        # find innermost node
        # Find the list comprehension body. It is the inner-most
        # node that is not list_.. .
        if_node = None
        comp_for = None
        comp_store = None
@@ -1303,7 +1305,7 @@ class SourceWalker(GenericASTTraversal, object):

        have_not = False
        while n in ('list_iter', 'comp_iter'):
            n = n[0] # recurse one step
            n = n[0] # iterate one nesting deeper
            if n in ('list_for', 'comp_for'):
                if n[2] == 'store':
                    store = n[2]
@@ -1329,8 +1331,20 @@ class SourceWalker(GenericASTTraversal, object):
        else:
            self.preorder(store)

        # FIXME this is all merely approximate
        # from trepan.api import debug; debug()
        self.write(' in ')
        self.preorder(node[-3])

        if ast == 'list_comp':
            list_iter = ast[1]
            assert list_iter == 'list_iter'
            if list_iter == 'list_for':
                self.preorder(list_iter[3])
                self.prec = p
                return
            pass

        if comp_store:
            self.preorder(comp_for)
        elif if_node:
@@ -1338,6 +1352,7 @@ class SourceWalker(GenericASTTraversal, object):
            if have_not:
                self.write('not ')
            self.preorder(if_node)
            pass
        self.prec = p

    def listcomprehension_walk2(self, node):