mirror of https://github.com/rocky/python-uncompyle6.git

Allow comments in grammar rules. Start working on Python3 class (not
finished). More test organization.
BIN  test/bytecode_2.7/01_class.pyc  (new file; binary not shown)
BIN  test/bytecode_2.7/10_class.pyc  (new file; binary not shown)
BIN  test/bytecode_3.4/01_class.pyc  (new file; binary not shown)
BIN  test/bytecode_3.4/02_def.pyc  (new file; binary not shown)
@@ -1,2 +1,15 @@
# Tests:
#
# For Python3:
# classdef ::= LOAD_BUILD_CLASS mkfunc LOAD_CONST CALL_FUNCTION_2 designator
# mkfunc ::= LOAD_CONST LOAD_CONST MAKE_FUNCTION_0

# For Python2:
# classdef ::= LOAD_CONST expr mkfunc CALL_FUNCTION_0 BUILD_CLASS designator
# mkfunc ::= LOAD_CONST MAKE_FUNCTION_0

class A:
    pass

# class B(Exception):
#    pass
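
For reference, the bytecode shape the Python3 rules above describe can be seen by disassembling a class statement. The snippet below is illustrative only and is not part of the test file; it is written against CPython 3.4 and exact output varies by version:

    import dis
    # On CPython 3.4, 'class A: pass' compiles to LOAD_BUILD_CLASS, LOAD_CONSTs
    # of the class code object and the name 'A', MAKE_FUNCTION, another
    # LOAD_CONST of the name, then CALL_FUNCTION with 2 arguments.
    dis.dis(compile("class A:\n    pass\n", "<example>", "exec"))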
test/simple_source/def/02_def.py  (new file, 15 lines)
@@ -0,0 +1,15 @@
def x0():
    pass

def x1(a):
    pass


def x2(a=5):
    pass

def x3(a, b, c=5):
    pass

def x4(a, b=5, **c):
    pass
@@ -1,2 +0,0 @@
def x0():
    pass

@@ -1,2 +0,0 @@
def x1(a):
    pass
@@ -42,7 +42,7 @@ PYTHON_VERSION_STR = "%s.%s" % (sys.version_info[0], sys.version_info[1])
sys.setrecursionlimit(5000)

def check_python_version(program):
    if not (sys.version_info[0:2] in ((2,6), (2,7), (3,4))):
    if not (sys.version_info[0:2] in ((2, 6), (2, 7), (3, 4))):
        print('Error: %s requires Python 2.6, 2.7 or 3.4' % program,
              file=sys.stderr)
        sys.exit(-1)
@@ -47,7 +47,7 @@ def updateGlobal():
    globals().update({'PJIT': opmap['JUMP_IF_TRUE']})
    globals().update({'JA': opmap['JUMP_ABSOLUTE']})
    globals().update({'JF': opmap['JUMP_FORWARD']})
    globals().update(dict([(k.replace('+','_'),v) for (k,v) in opmap.items()]))
    globals().update(dict([(k.replace('+', '_'), v) for (k, v) in opmap.items()]))
    globals().update({'JUMP_OPs': map(lambda op: opname[op], hasjrel + hasjabs)})

# Instruction opcodes for compiled code

@@ -47,7 +47,7 @@ def updateGlobal():
    globals().update({'PJIT': opmap['JUMP_IF_TRUE']})
    globals().update({'JA': opmap['JUMP_ABSOLUTE']})
    globals().update({'JF': opmap['JUMP_FORWARD']})
    globals().update(dict([(k.replace('+','_'),v) for (k,v) in opmap.items()]))
    globals().update(dict([(k.replace('+', '_'), v) for (k, v) in opmap.items()]))
    globals().update({'JUMP_OPs': map(lambda op: opname[op], hasjrel + hasjabs)})

# Instruction opcodes for compiled code

@@ -43,7 +43,7 @@ def updateGlobal():
    globals().update({'PJIT': opmap['POP_JUMP_IF_TRUE']})
    globals().update({'JA': opmap['JUMP_ABSOLUTE']})
    globals().update({'JF': opmap['JUMP_FORWARD']})
    globals().update(dict([(k.replace('+','_'),v) for (k,v) in opmap.items()]))
    globals().update(dict([(k.replace('+', '_'), v) for (k, v) in opmap.items()]))
    globals().update({'JUMP_OPs': map(lambda op: opname[op], hasjrel + hasjabs)})

# Instruction opcodes for compiled code

@@ -49,7 +49,7 @@ def updateGlobal():
    globals().update({'PJIT': opmap['POP_JUMP_IF_TRUE']})
    globals().update({'JA': opmap['JUMP_ABSOLUTE']})
    globals().update({'JF': opmap['JUMP_FORWARD']})
    globals().update(dict([(k.replace('+','_'),v) for (k,v) in opmap.items()]))
    globals().update(dict([(k.replace('+', '_'), v) for (k, v) in opmap.items()]))
    globals().update({'JUMP_OPs': map(lambda op: opname[op], hasjrel + hasjabs)})

# Instruction opcodes for compiled code
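
Context for the k.replace('+', '_') line touched in the hunks above: Python 2 opcode names such as 'SLICE+1' contain '+', which cannot appear in an identifier, so the scanners mangle the names before injecting them as module globals. A minimal standalone sketch of the same idea, not the scanners' actual code:

    from dis import opmap

    # Rename keys the way the scanners do; on Python 2 this turns entries like
    # 'SLICE+1' into 'SLICE_1', on Python 3 the names are already identifiers.
    mangled = dict((k.replace('+', '_'), v) for (k, v) in opmap.items())
    assert len(mangled) == len(opmap)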
@@ -17,7 +17,7 @@ class AST(UserList):
    def isNone(self):
        """An AST None token. We can't use regular list comparisons
        because AST token offsets might be different"""
        return len(self.data) == 1 and self.data[0] == NoneToken
        return len(self.data) == 1 and NoneToken == self.data[0]

    def __getslice__(self, low, high): return self.data[low:high]
@@ -3,7 +3,7 @@
#  Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
#  Copyright (c) 2015 Rocky Bernstein
#
#  See LICENSE for lisence
#  See LICENSE for license
"""
A spark grammar for Python 2.x.

@@ -650,15 +650,15 @@ class Python2Parser(PythonParser):
        Special handling for opcodes that take a variable number
        of arguments -- we add a new rule for each:

        build_list ::= {expr}^n BUILD_TUPLE_n
        build_list ::= {expr}^n BUILD_LIST_n
        build_list ::= {expr}^n BUILD_TUPLE_n
        unpack_list ::= UNPACK_LIST {expr}^n
        unpack ::= UNPACK_TUPLE {expr}^n
        unpack ::= UNPACK_SEQUENCE {expr}^n

        mkfunc ::= {expr}^n LOAD_CONST MAKE_FUNCTION_n
        mklambda ::= {expr}^n LOAD_LAMBDA MAKE_FUNCTION_n
        mkfunc ::= {expr}^n load_closure LOAD_CONST MAKE_FUNCTION_n
        mkfunc ::= {expr}^n LOAD_CONST MAKE_FUNCTION_n
        mklambda ::= {expr}^n LOAD_LAMBDA MAKE_FUNCTION_n
        mkfunc ::= {expr}^n load_closure LOAD_CONST MAKE_FUNCTION_n
        expr ::= expr {expr}^n CALL_FUNCTION_n
        expr ::= expr {expr}^n CALL_FUNCTION_VAR_n POP_TOP
        expr ::= expr {expr}^n CALL_FUNCTION_VAR_KW_n POP_TOP
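
A rough sketch of what the "{expr}^n" notation above expands to for one concrete token. The helper below is made up for illustration and is not the parser's actual code:

    def expand_rule(lhs, opname):
        # opname is something like 'BUILD_LIST_3'; the trailing count becomes
        # n 'expr' items on the right-hand side of the generated rule.
        n = int(opname.rsplit('_', 1)[1])
        return '%s ::= %s %s' % (lhs, ' '.join(['expr'] * n), opname)

    print(expand_rule('build_list', 'BUILD_LIST_3'))
    # build_list ::= expr expr expr BUILD_LIST_3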
@@ -53,6 +53,9 @@ class Python3Parser(PythonParser):
    def p_list_comprehension(self, args):
        '''
        # Python3 adds LOAD_LISTCOMP and does list comprehension like
        # other comprehensions (set, dictionary).

        expr ::= listcomp
        listcomp ::= LOAD_LISTCOMP LOAD_CONST MAKE_FUNCTION_0 expr GET_ITER CALL_FUNCTION_1
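
The listcomp rule mirrors how CPython 3.4 compiles a list comprehension: the body becomes a separate code object that is turned into a function and called on the iterator. A quick, illustrative way to see this (output differs across versions):

    import dis
    # Shows LOAD_CONST <code object <listcomp>>, MAKE_FUNCTION, GET_ITER and
    # CALL_FUNCTION 1 on CPython 3.4.
    dis.dis(compile("[x for x in y]", "<example>", "exec"))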
@@ -367,8 +370,9 @@
        kwarg ::= LOAD_CONST expr

        classdef ::= LOAD_CONST expr mkfunc
                     CALL_FUNCTION_0 BUILD_CLASS designator
        # Python3 introduced LOAD_BUILD_CLASS
        classdef ::= LOAD_BUILD_CLASS mkfunc LOAD_CONST
                     CALL_FUNCTION_2 designator

        stmt ::= classdefdeco
        classdefdeco ::= classdefdeco1 designator
@@ -442,6 +446,7 @@ class Python3Parser(PythonParser):
        except_stmt ::= except_cond2 except_suite
        except_stmt ::= except

        # Python3 introduced POP_EXCEPT
        except_suite ::= c_stmts_opt POP_EXCEPT JUMP_FORWARD
        except_suite ::= c_stmts_opt POP_EXCEPT jmp_abs
        except_suite ::= return_stmts
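
As a reminder of what the POP_EXCEPT rules match: on CPython 3.x an except handler ends with POP_EXCEPT before jumping out of the try statement. Illustrative only:

    import dis
    src = "try:\n    f()\nexcept ValueError:\n    pass\n"
    # The handler block in the disassembly ends with POP_EXCEPT followed by
    # a jump past the rest of the try statement.
    dis.dis(compile(src, "<example>", "exec"))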
@@ -580,6 +585,7 @@ class Python3Parser(PythonParser):
        load_attr ::= expr LOAD_ATTR
        get_iter ::= expr GET_ITER

        # Python3 drops slice0..slice3
        buildslice3 ::= expr expr expr BUILD_SLICE_3
        buildslice2 ::= expr expr BUILD_SLICE_2
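
The buildslice rules reflect that Python 3 builds every slice with BUILD_SLICE rather than the Python 2 SLICE+0..SLICE+3 opcodes used for the simple forms. Illustrative only:

    import dis
    # x[1:2:3] pushes the three bounds, emits BUILD_SLICE 3, then BINARY_SUBSCR.
    dis.dis(compile("x[1:2:3]", "<example>", "eval"))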
@@ -674,7 +680,6 @@ class Python3Parser(PythonParser):
        expr ::= expr {expr}^n CALL_FUNCTION_VAR_KW_n POP_TOP
        expr ::= expr {expr}^n CALL_FUNCTION_KW_n POP_TOP
        """

        # from trepan.api import debug
        # debug(start_opts={'startup-profile': True})
        for token in tokens:
@@ -1,5 +1,6 @@
'''
"""
Copyright (c) 1998-2002 John Aycock
Copyright (c) 2015 Rocky Bernstein

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the

@@ -19,10 +20,11 @@ Copyright (c) 1998-2002 John Aycock
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''

"""
from __future__ import print_function

import os, re

__version__ = 'SPARK-1.0 Python3 compatible'

def _namelist(instance):
@@ -130,8 +132,13 @@ class GenericParser:
    def preprocess(self, rule, func):
        return rule, func

    def addRule(self, doc, func, _preprocess=1):
    def addRule(self, doc, func, _preprocess=True):
        """Add grammar rules to self.rules, self.rule2func and self.rule2name"""
        fn = func

        # remove blank lines and comment lines, e.g. lines starting with "#"
        doc = os.linesep.join([s for s in doc.splitlines() if s and not re.match("^\s*#", s)])

        rules = doc.split()

        index = []
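
The filtering added above is what allows comments inside grammar-rule docstrings, as noted in the commit message. A standalone sketch of the same idea; the rule text below is made up for the example:

    import os, re

    doc = '''
    # comments like this one are stripped before the rules are parsed
    stmt ::= classdef
    classdef ::= LOAD_BUILD_CLASS mkfunc LOAD_CONST CALL_FUNCTION_2 designator'''
    cleaned = os.linesep.join(s for s in doc.splitlines()
                              if s and not re.match(r"^\s*#", s))
    print(cleaned)  # only the two rule lines remain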
@@ -35,7 +35,9 @@ else:
    from StringIO import StringIO


from uncompyle6.parsers.spark import GenericASTTraversal, GenericASTTraversalPruningException
from uncompyle6.parsers.spark import GenericASTTraversal, GenericASTTraversalPruningException, \
    DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG

from types import CodeType

from collections import namedtuple
@@ -46,14 +48,15 @@ ExtractInfo = namedtuple("ExtractInfo",
class Traverser(pysource.Walker, object):
    stacked_params = ('f', 'indent', 'isLambda', '_globals')

    def __init__(self, version, scanner, showast=False):
    def __init__(self, version, scanner, showast=False,
                 debug_parser=PARSER_DEFAULT_DEBUG):
        GenericASTTraversal.__init__(self, ast=None)
        self.scanner = scanner
        params = {
            'f': StringIO(),
            'indent': '',
            }
        self.p = get_python_parser(version)
        self.p = get_python_parser(version, dict(debug_parser))
        self.showast = showast
        self.__params = params
        self.__param_stack = []
@@ -563,7 +566,7 @@ class Traverser(pysource.Walker, object):

        self.prune()

    def gen_source_d(self, ast, name, customize, isLambda=0, returnNone=False):
    def gen_source(self, ast, name, customize, isLambda=0, returnNone=False):
        """convert AST to source code"""

        rn = self.return_none
@@ -1146,91 +1149,104 @@ class Traverser(pysource.Walker, object):
            self.print_(self.indent, 'global ', g)
        self.mod_globs -= all_globals
        rn = ('None' in code.co_names) and not find_none(ast)
        self.gen_source_d(ast, code.co_name, code._customize, isLambda=isLambda,
        self.gen_source(ast, code.co_name, code._customize, isLambda=isLambda,
                         returnNone=rn)
        code._tokens = None; code._customize = None # save memory

        pass

def deparse_code(version, co, out=StringIO(), showasm=False, showast=False):
def deparse_code(version, co, out=StringIO(), showasm=False, showast=False,
                 showgrammar=False):

    assert inspect.iscode(co)
    # store final output stream for case of error
    __real_out = out or sys.stdout
    scanner = get_scanner(version)

    tokens, customize = scanner.disassemble(co)

    # Build AST from disassembly.
    # walk = walker.Walker(out, scanner, showast=showast)
    walk = Traverser(version, scanner, showast=showast)
    tokens, customize = scanner.disassemble(co)
    if showasm:
        for t in tokens:
            print(t)

    try:
        walk.ast = walk.build_ast_d(tokens, customize)
    except pysource.ParserError as e : # parser failed, dump disassembly
        print(e, file=__real_out)
        raise
    debug_parser = dict(PARSER_DEFAULT_DEBUG)
    debug_parser['reduce'] = showgrammar

    # Build AST from disassembly.
    # deparsed = pysource.Walker(out, scanner, showast=showast)
    deparsed = Traverser(version, scanner, showast=showast, debug_parser=debug_parser)

    deparsed.ast = deparsed.build_ast_d(tokens, customize)

    assert deparsed.ast == 'stmts', 'Should have parsed grammar start'

    del tokens # save memory

    # convert leading '__doc__ = "..."' into doc string
    assert walk.ast == 'stmts'
    walk.mod_globs = pysource.find_globals(walk.ast, set())
    walk.gen_source_d(walk.ast, co.co_name, customize)
    walk.set_pos_info(walk.ast, 0, len(walk.text))
    walk.fixup_parents(walk.ast, None)
    assert deparsed.ast == 'stmts'
    deparsed.mod_globs = pysource.find_globals(deparsed.ast, set())

    for g in walk.mod_globs:
        walk.write('global %s ## Warning: Unused global' % g)
    if walk.ERROR:
        raise walk.ERROR
    # Just when you think we've forgotten about what we
    # were supposed to do: generate source from AST!
    deparsed.gen_source(deparsed.ast, co.co_name, customize)

    return walk
    deparsed.set_pos_info(deparsed.ast, 0, len(deparsed.text))
    deparsed.fixup_parents(deparsed.ast, None)

# if __name__ == '__main__':
    for g in deparsed.mod_globs:
        deparsed.write('# global %s ## Warning: Unused global' % g)
    if deparsed.ERROR:
        raise deparsed.ERROR

# def deparse_test(co):
#     sys_version = sys.version_info.major + (sys.version_info.minor / 10.0)
#     walk = deparse_code(sys_version, co, showasm=False, showast=False)
#     print("deparsed source")
#     print(walk.text, "\n")
#     print('------------------------')
#     for name, offset in sorted(walk.offsets.keys(),
#                                key=lambda x: str(x[0])):
#         print("name %s, offset %s" % (name, offset))
#         nodeInfo = walk.offsets[name, offset]
#         node = nodeInfo.node
#         extractInfo = walk.extract_node_info(node)
#         print("code: %s" % node.type)
#         # print extractInfo
#         print(extractInfo.selectedText)
#         print(extractInfo.selectedLine)
#         print(extractInfo.markerLine)
#         extractInfo, p = walk.extract_parent_info(node)
#         if extractInfo:
#             print("Contained in...")
#             print(extractInfo.selectedLine)
#             print(extractInfo.markerLine)
#             print("code: %s" % p.type)
#         print('=' * 40)
#         pass
#     pass
#     return
    return deparsed

# def get_code_for_fn(fn):
#     return fn.__code__
if __name__ == '__main__':

    # def gcd(a, b):
    #     if a > b:
    #         (a, b) = (b, a)
    #         pass
    def deparse_test(co):
        sys_version = sys.version_info.major + (sys.version_info.minor / 10.0)
        walk = deparse_code(sys_version, co, showasm=False, showast=False,
                            showgrammar=False)
        print("deparsed source")
        print(walk.text, "\n")
        print('------------------------')
        for name, offset in sorted(walk.offsets.keys(),
                                   key=lambda x: str(x[0])):
            print("name %s, offset %s" % (name, offset))
            nodeInfo = walk.offsets[name, offset]
            node = nodeInfo.node
            extractInfo = walk.extract_node_info(node)
            print("code: %s" % node.type)
            # print extractInfo
            print(extractInfo.selectedText)
            print(extractInfo.selectedLine)
            print(extractInfo.markerLine)
            extractInfo, p = walk.extract_parent_info(node)
            if extractInfo:
                print("Contained in...")
                print(extractInfo.selectedLine)
                print(extractInfo.markerLine)
                print("code: %s" % p.type)
            print('=' * 40)
            pass
        pass
        return

    #     if a <= 0:
    #         return None
    #     if a == 1 or a == b:
    #         return a
    #     return gcd(b-a, a)
    def get_code_for_fn(fn):
        return fn.__code__

    # # check_args(['3', '5'])
    # deparse_test(get_code_for_fn(gcd))
    # # deparse_test(get_code_for_fn(gcd))
    # # deparse_test(get_code_for_fn(Traverser.fixup_offsets))
    # # deparse_test(inspect.currentframe().f_code)
    def gcd(a, b):
        if a > b:
            (a, b) = (b, a)
            pass

        if a <= 0:
            return None
        if a == 1 or a == b:
            return a
        return gcd(b-a, a)

    # check_args(['3', '5'])
    deparse_test(get_code_for_fn(gcd))
    # deparse_test(get_code_for_fn(gcd))
    # deparse_test(get_code_for_fn(Traverser.fixup_offsets))
    # # deparse_test(inspect.currentframe().f_code)
@@ -1052,8 +1052,12 @@ class Walker(GenericASTTraversal, object):

    def n_classdef(self, node):
        # class definition ('class X(A,B,C):')

        cclass = self.currentclass
        self.currentclass = str(node[0].pattr)
        if self.version > 3.0:
            self.currentclass = str(node[1][1].pattr)
        else:
            self.currentclass = str(node[0].pattr)

        self.write('\n\n')
        self.write(self.indent, 'class ', self.currentclass)
@@ -1062,7 +1066,11 @@ class Walker(GenericASTTraversal, object):

        # class body
        self.indentMore()
        self.build_class(node[2][-2].attr)

        if self.version > 3.0:
            self.build_class(node[1][0].attr)
        else:
            self.build_class(node[2][-2].attr)
        self.indentLess()

        self.currentclass = cclass
@@ -1552,7 +1560,6 @@ def deparse_code(version, co, out=sys.stdout, showasm=False, showast=False,
    debug_parser = dict(PARSER_DEFAULT_DEBUG)
    debug_parser['reduce'] = showgrammar


    # Build AST from disassembly.
    deparsed = Walker(version, out, scanner, showast=showast, debug_parser=debug_parser)