python-uncompyle6, mirror of https://github.com/rocky/python-uncompyle6.git
PEP E225 with a nod to Deepcommit
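
E225 is the pycodestyle/flake8 code for "missing whitespace around operator". Every hunk below either inserts that whitespace or, in the version file at the end, suppresses the check with a `# noqa` comment. A minimal illustration of the rule (the file name in the commands is hypothetical):

    x=1    # flagged by pycodestyle as E225: missing whitespace around operator
    x = 1  # clean

    # To reproduce the warnings locally (assuming pycodestyle or flake8 is installed):
    #   pycodestyle --select=E225 example.py
    #   flake8 --select=E225 example.py
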
@@ -47,7 +47,7 @@ class PrintFake():
             out = out[:-self.pending_newlines]
         self.f.write(out)
     def println(self, *data):
-        if data and not(len(data) == 1 and data[0] ==''):
+        if data and not(len(data) == 1 and data[0] == ''):
             self.write(*data)
         self.pending_newlines = max(self.pending_newlines, 1)
         return
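
For context on the helper patched above: `println` routes its arguments through `write`, which holds back trailing newlines in a `pending_newlines` counter and emits them before the next chunk of output. A minimal self-contained sketch of that idea (an illustration written for this note, not the project's actual test fake):

    import sys

    class PrintFake:
        """Minimal stand-in: trailing newlines are deferred via pending_newlines."""
        def __init__(self, f=sys.stdout):
            self.f = f
            self.pending_newlines = 0

        def write(self, *args):
            out = ''.join(str(a) for a in args)
            # First emit any newlines still owed from earlier calls.
            if self.pending_newlines:
                out = '\n' * self.pending_newlines + out
                self.pending_newlines = 0
            # Defer trailing newlines instead of writing them now.
            stripped = out.rstrip('\n')
            self.pending_newlines = len(out) - len(stripped)
            if self.pending_newlines:
                out = out[:-self.pending_newlines]
            self.f.write(out)

        def println(self, *data):
            if data and not (len(data) == 1 and data[0] == ''):
                self.write(*data)
            self.pending_newlines = max(self.pending_newlines, 1)

So `PrintFake().println('x')` writes `x` immediately but owes one newline to whatever is written next.
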
@@ -80,7 +80,7 @@ def are_instructions_equal(i1, i2):
 
     :return: True if the two instructions are approximately equal, otherwise False.
     """
-    result = (1==1
+    result = (1 == 1
               and i1.opname == i2.opname
               and i1.opcode == i2.opcode
               and i1.arg == i2.arg
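
The `1 == 1` that opens this chain is presumably just a neutral first operand so each real comparison can sit on its own `and ...` line; only the spacing around its `==` changes here.
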
@@ -18,7 +18,7 @@ a *= b; print a # a = a*b = 2
 a -= a; print a # a = a-a = 0
 a += 7*3; print a # == 21
 
-l= [1,2,3]
+l = [1,2,3]
 l[1] *= 3; print l[1]; # 6
 l[1][2][3] = 7
 l[1][2][3] *= 3;
@@ -76,7 +76,7 @@ def get_srcdir():
 src_dir = get_srcdir()
 os.chdir(src_dir)
 
-files=[
+files = [
     'if',
     'ifelse',
     # 'keyword',
@@ -1,7 +1,7 @@
 # We have to do contortions here because
 # lambda's have to be more or less on a line
 
-f = lambda x: 1 if x<2 else 3
+f = lambda x: 1 if x < 2 else 3
 assert f(3) == 3
 assert f(1) == 1
 
@@ -3,7 +3,7 @@
 # while1stmt ::= SETUP_LOOP l_stmts_opt JUMP_BACK COME_FROM
 # while1stmt ::= SETUP_LOOP l_stmts_opt CONTINUE COME_FROM
 # which is included in later code generation
-ms=0
-if ms==1:
+ms = 0
+if ms == 1:
     while 1:
         pass
@@ -2,7 +2,7 @@
 #Not detecting 2nd return is outside of
 # if/then. Fix was to ensure COME_FROM
 def return_return_bug(foo):
-    if foo =='say_hello':
+    if foo == 'say_hello':
         return "hello"
     return "world"
 
@@ -20,7 +20,7 @@ a *= b; # print a # a = a*b = 2
 a -= a; # print a # a = a-a = 0
 a += 7*3; # print a # == 21
 
-l= [1,2,3]
+l = [1,2,3]
 l[1] *= 3; # print l[1]; # 6
 l[1][2][3] = 7
 l[1][2][3] *= 3;
@@ -28,7 +28,7 @@ def foo():
     z = {}
 
     def a():
-        b =1
+        b = 1
         global z
         del z
     def b(y):
@@ -36,7 +36,7 @@ python_versions = [v for v in magics.python_versions if
 # FIXME: we should remove Python versions that we don't support.
 # These include Jython, and Python bytecode changes pre release.
 
-TEST_VERSIONS=(
+TEST_VERSIONS = (
     'pypy-2.4.0', 'pypy-2.6.1',
     'pypy-5.0.1', 'pypy-5.3.1', 'pypy3.5-5.7.1-beta',
     'native') + tuple(python_versions)
@@ -467,7 +467,7 @@ class Python2Parser(PythonParser):
                 pass
             self.add_unique_rules([
                 ('mkfunc ::= %s load_closure LOAD_CONST %s' %
-                 ('expr '* token.attr, opname))], customize)
+                 ('expr ' * token.attr, opname))], customize)
 
         if self.version >= 2.7:
             if i > 0:
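
The fix here only spaces out the `*`, but the expression it touches is the core trick of these parser hunks: repeating the string `'expr '` produces one `expr` nonterminal per argument when building a grammar rule's right-hand side. A tiny sketch of just that string arithmetic (the attribute value and opcode name are made up):

    token_attr = 3                 # hypothetical: number of expression arguments
    opname = 'MAKE_CLOSURE_3'      # hypothetical opcode name

    rule = 'mkfunc ::= %s load_closure LOAD_CONST %s' % ('expr ' * token_attr, opname)
    print(rule)
    # -> mkfunc ::= expr expr expr  load_closure LOAD_CONST MAKE_CLOSURE_3
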
@@ -874,12 +874,12 @@ class Python3Parser(PythonParser):
                 j = 2
             if is_pypy or (i >= j and tokens[i-j] == 'LOAD_LAMBDA'):
                 rule_pat = ('mklambda ::= %sload_closure LOAD_LAMBDA %%s%s' %
-                            ('pos_arg '* args_pos, opname))
+                            ('pos_arg ' * args_pos, opname))
                 self.add_make_function_rule(rule_pat, opname, token.attr, customize)
 
             if has_get_iter_call_function1:
                 rule_pat = ("generator_exp ::= %sload_closure load_genexpr %%s%s expr "
-                            "GET_ITER CALL_FUNCTION_1" % ('pos_arg '* args_pos, opname))
+                            "GET_ITER CALL_FUNCTION_1" % ('pos_arg ' * args_pos, opname))
                 self.add_make_function_rule(rule_pat, opname, token.attr, customize)
 
             if has_get_iter_call_function1:
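
The `rule_pat` strings in this and the following hunks are formatted in two stages: the doubled `%%s` survives the first `%` substitution as a literal `%s`, leaving a slot that a later substitution (apparently inside `add_make_function_rule`) can fill. A short sketch of the mechanics with invented values:

    args_pos = 2
    opname = 'MAKE_CLOSURE_2'      # hypothetical opcode name

    rule_pat = ('mklambda ::= %sload_closure LOAD_LAMBDA %%s%s' %
                ('pos_arg ' * args_pos, opname))
    print(rule_pat)
    # -> mklambda ::= pos_arg pos_arg load_closure LOAD_LAMBDA %sMAKE_CLOSURE_2

    print(rule_pat % 'annotate_args ')   # second-stage fill of the remaining %s slot
    # -> mklambda ::= pos_arg pos_arg load_closure LOAD_LAMBDA annotate_args MAKE_CLOSURE_2
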
@@ -899,7 +899,7 @@ class Python3Parser(PythonParser):
             if (is_pypy or (i >= j and tokens[i-j] == 'LOAD_DICTCOMP')):
                 self.add_unique_rule('dict_comp ::= %sload_closure LOAD_DICTCOMP %s '
                                      'expr GET_ITER CALL_FUNCTION_1' %
-                                     ('pos_arg '* args_pos, opname),
+                                     ('pos_arg ' * args_pos, opname),
                                      opname, token.attr, customize)
 
         if args_kw > 0:
@@ -961,10 +961,10 @@ class Python3Parser(PythonParser):
 
             if has_get_iter_call_function1:
                 rule_pat = ("generator_exp ::= %sload_genexpr %%s%s expr "
-                            "GET_ITER CALL_FUNCTION_1" % ('pos_arg '* args_pos, opname))
+                            "GET_ITER CALL_FUNCTION_1" % ('pos_arg ' * args_pos, opname))
                 self.add_make_function_rule(rule_pat, opname, token.attr, customize)
                 rule_pat = ("generator_exp ::= %sload_closure load_genexpr %%s%s expr "
-                            "GET_ITER CALL_FUNCTION_1" % ('pos_arg '* args_pos, opname))
+                            "GET_ITER CALL_FUNCTION_1" % ('pos_arg ' * args_pos, opname))
                 self.add_make_function_rule(rule_pat, opname, token.attr, customize)
             if is_pypy or (i >= 2 and tokens[i-2] == 'LOAD_LISTCOMP'):
                 if self.version >= 3.6:
@@ -980,8 +980,8 @@ class Python3Parser(PythonParser):
 
             if is_pypy or (i >= 2 and tokens[i-2] == 'LOAD_LAMBDA'):
                 rule_pat = ('mklambda ::= %s%sLOAD_LAMBDA %%s%s' %
-                            (('pos_arg '* args_pos),
-                             ('kwarg '* args_kw),
+                            (('pos_arg ' * args_pos),
+                             ('kwarg ' * args_kw),
                              opname))
                 self.add_make_function_rule(rule_pat, opname, token.attr, customize)
             continue
@@ -998,7 +998,7 @@ class Python3Parser(PythonParser):
 
             if has_get_iter_call_function1:
                 rule_pat = ("generator_exp ::= %sload_genexpr %%s%s expr "
-                            "GET_ITER CALL_FUNCTION_1" % ('pos_arg '* args_pos, opname))
+                            "GET_ITER CALL_FUNCTION_1" % ('pos_arg ' * args_pos, opname))
                 self.add_make_function_rule(rule_pat, opname, token.attr, customize)
 
             if is_pypy or (i >= j and tokens[i-j] == 'LOAD_LISTCOMP'):
@@ -1014,8 +1014,8 @@ class Python3Parser(PythonParser):
             # FIXME: Fold test into add_make_function_rule
             if is_pypy or (i >= j and tokens[i-j] == 'LOAD_LAMBDA'):
                 rule_pat = ('mklambda ::= %s%sLOAD_LAMBDA %%s%s' %
-                            (('pos_arg '* args_pos),
-                             ('kwarg '* args_kw),
+                            (('pos_arg ' * args_pos),
+                             ('kwarg ' * args_kw),
                              opname))
                 self.add_make_function_rule(rule_pat, opname, token.attr, customize)
 
@@ -348,7 +348,7 @@ class Python36Parser(Python35Parser):
                   build_tuple_unpack_with_call
                   %s
                   CALL_FUNCTION_EX
-                  """ % 'expr '* token.attr, nop_func)
+                  """ % 'expr ' * token.attr, nop_func)
             pass
 
         # FIXME: Is this right?
@@ -271,7 +271,7 @@ class Scanner(object):
         code = self.code
         # Make sure requested positions do not go out of
         # code bounds
-        if not (start>=0 and end<=len(code)):
+        if not (start >= 0 and end <= len(code)):
             return None
 
         try:
@@ -435,7 +435,7 @@ class Scanner(object):
                 # j = self.stmts.index(inst.offset)
                 # self.lines[j] = offset
 
-            new_inst= inst._replace(starts_line=starts_line,
+            new_inst = inst._replace(starts_line=starts_line,
                                      is_jump_target=is_jump_target,
                                      offset=offset)
             inst = new_inst
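
Context for the `_replace` call in this hunk: the instruction objects are namedtuple-like, and `_replace` returns a copy with the named fields swapped out rather than mutating in place. A standalone illustration with a throwaway namedtuple (not the project's actual Instruction type):

    from collections import namedtuple

    Inst = namedtuple('Inst', 'opname offset starts_line is_jump_target')

    inst = Inst(opname='LOAD_CONST', offset=0, starts_line=None, is_jump_target=False)
    # _replace builds a new tuple; the original is left untouched.
    new_inst = inst._replace(starts_line=1, is_jump_target=True, offset=2)
    print(inst)      # Inst(opname='LOAD_CONST', offset=0, starts_line=None, is_jump_target=False)
    print(new_inst)  # Inst(opname='LOAD_CONST', offset=2, starts_line=1, is_jump_target=True)
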
@@ -1179,7 +1179,7 @@ class Scanner2(Scanner):
         Return a list with indexes to them or [] if none found.
         """
 
-        assert(start>=0 and end<=len(self.code) and start <= end)
+        assert(start >= 0 and end <= len(self.code) and start <= end)
 
         try: None in instr
         except: instr = [instr]
@@ -1069,7 +1069,7 @@ class Scanner3(Scanner):
        optionally <target>ing specified offset, and return list found
        <instr> offsets which are not within any POP_JUMP_IF_TRUE jumps.
        """
-        assert(start>=0 and end<=len(self.code) and start <= end)
+        assert(start >= 0 and end <= len(self.code) and start <= end)
 
        # Find all offsets of requested instructions
        instr_offsets = self.inst_matches(start, end, instr, target,
@@ -93,7 +93,7 @@ class Token():
                 if not self.pattr.startswith('to '):
                     pattr = "to " + self.pattr
             elif self.op in self.opc.JABS_OPS:
-                self.pattr= str(self.pattr)
+                self.pattr = str(self.pattr)
                 if not self.pattr.startswith('to '):
                     pattr = "to " + str(self.pattr)
                 pass
@@ -30,7 +30,7 @@ class AligningWalker(SourceWalker, object):
         self.current_line_number = 0
 
     def println(self, *data):
-        if data and not(len(data) == 1 and data[0] ==''):
+        if data and not(len(data) == 1 and data[0] == ''):
             self.write(*data)
 
         self.pending_newlines = max(self.pending_newlines, 1)
@@ -586,7 +586,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
             n = n[0] # recurse one step
         if n == 'list_for': n = n[3]
         elif n == 'list_if': n = n[2]
-        elif n == 'list_if_not': n= n[2]
+        elif n == 'list_if_not': n = n[2]
         assert n == 'lc_body'
         if node[0].kind.startswith('BUILD_LIST'):
             start = len(self.f.getvalue())
@@ -395,7 +395,7 @@ class SourceWalker(GenericASTTraversal, object):
         self.f.write(out)
 
     def println(self, *data):
-        if data and not(len(data) == 1 and data[0] ==''):
+        if data and not(len(data) == 1 and data[0] == ''):
             self.write(*data)
         self.pending_newlines = max(self.pending_newlines, 1)
 
@@ -855,7 +855,7 @@ class SourceWalker(GenericASTTraversal, object):
             n = n[0] # iterate one nesting deeper
         if n == 'list_for': n = n[3]
         elif n == 'list_if': n = n[2]
-        elif n == 'list_if_not': n= n[2]
+        elif n == 'list_if_not': n = n[2]
         assert n == 'lc_body'
         self.write( '[ ')
 
@@ -915,7 +915,7 @@ class SourceWalker(GenericASTTraversal, object):
             n = n[0] # iterate one nesting deeper
         if n == 'list_for': n = n[3]
         elif n == 'list_if': n = n[2]
-        elif n == 'list_if_not': n= n[2]
+        elif n == 'list_if_not': n = n[2]
         assert n == 'lc_body'
         self.write( '[ ')
 
@@ -1048,7 +1048,7 @@ class SourceWalker(GenericASTTraversal, object):
         while (len(ast) == 1
                or (ast in ('sstmt', 'return')
                    and ast[-1] in ('RETURN_LAST', 'RETURN_VALUE'))):
-            self.prec=100
+            self.prec = 100
             ast = ast[0]
 
         # Pick out important parts of the comprehension:
@@ -1188,7 +1188,7 @@ class SourceWalker(GenericASTTraversal, object):
         while (len(ast) == 1
                or (ast in ('sstmt', 'return')
                    and ast[-1] in ('RETURN_LAST', 'RETURN_VALUE'))):
-            self.prec=100
+            self.prec = 100
             ast = ast[0]
 
         n = ast[1]
@@ -12,4 +12,4 @@
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 # This file is suitable for sourcing inside bash as
 # well as importing into Python
-VERSION='3.2.6'
+VERSION='3.2.6' # noqa
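
The `# noqa` added on that last line is the one change that is not a pure whitespace fix: judging by the file's own comments, `VERSION='3.2.6'` has to stay space-free so the same line remains valid when the file is sourced from bash (where `VERSION = '3.2.6'` is a syntax error), so the E225 warning is silenced rather than the code reformatted.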