mirror of https://github.com/rocky/python-uncompyle6.git
synced 2025-08-03 00:45:53 +08:00
More python2 -> python3 compatibility
.gitignore (vendored): 1 change

@@ -1,4 +1,5 @@
*.pyc
*~
/.python-version
/uncompyle6.egg-info
build

__pkginfo__.py (new file): 56 lines

@@ -0,0 +1,56 @@
"""uncompyle6 packaging information"""

# To the extent possible we make this file look more like a
# configuration file rather than code like setup.py. I find putting
# configuration stuff in the middle of a function call in setup.py,
# which for example requires commas in between parameters, is a little
# less elegant than having it here with reduced code, albeit there
# still is some room for improvement.

# Things that change more often go here.
copyright = """
Copyright (C) 2015 Rocky Bernstein <rocky@gnu.org>.
"""

# classifiers = ['Development Status :: 5 - Production/Stable',
#                'Environment :: Console',
#                'Intended Audience :: Developers',
#                'License :: OSI Approved :: GNU General Public License (GPL)',
#                'Operating System :: OS Independent',
#                'Programming Language :: Python',
#                'Topic :: Software Development :: Debuggers',
#                'Topic :: Software Development :: Libraries :: Python Modules',
#                ]

# The rest in alphabetic order
author = "Rocky Bernstein"
author_email = "rocky@gnu.org"
ftp_url = None
# license = 'GPL'
mailing_list = 'python-debugger@googlegroups.com'
modname = 'uncompyle6'
packages = ['uncompyle6', 'uncompyle6.opcodes']
py_modules = None
short_desc = 'Python byte-code to source-code converter'

import os
import os.path, sys


def get_srcdir():
    filename = os.path.normcase(os.path.dirname(os.path.abspath(__file__)))
    return os.path.realpath(filename)

# VERSION.py sets variable VERSION.
ns = {}
version = '2.0'
web = 'https://github.com/rocky/uncompyle6/'

# tracebacks in zip files are funky and not debuggable
zip_safe = False


def read(*rnames):
    return open(os.path.join(os.path.dirname(__file__), *rnames)).read()

long_description = ( read("README.rst") + '\n' )

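The comment "# VERSION.py sets variable VERSION." and the empty ns = {} dictionary suggest the version string is meant to be read by executing a VERSION.py file into that namespace, but the line that does so is not in this file yet. A hedged sketch of how that step commonly looks (the VERSION.py location and contents are assumptions, not something this commit establishes):

    # Hypothetical follow-up, not in this commit: populate ns from VERSION.py,
    # which would contain a single assignment such as   VERSION = '2.0'
    version_file = os.path.join(get_srcdir(), 'VERSION.py')   # assumed path
    if os.path.exists(version_file):
        exec(open(version_file).read(), ns)        # works on Python 2 and 3
        version = ns.get('VERSION', version)       # keep '2.0' as the fallback
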
setup.py: 43 changes

@@ -1,15 +1,38 @@
#! python

"""Setup script for the 'uncompyle' distribution."""
"""Setup script for the 'uncompyle6' distribution."""

from distutils.core import setup, Extension

setup (name = "uncompyle6",
       version = "2.0",
       description = "Python byte-code to source-code converter",
       author = "Mysterie",
       author_email = "kajusska@gmail.com",
       url = "http://github.com/Mysterie/uncompyle2",
       packages=['uncompyle6', 'uncompyle6.opcode'],
       scripts=['scripts/uncompyle6']
       )
# Get the package information used in setup().
# from __pkginfo__ import \
#     author, author_email, classifiers, \
#     install_requires, license, long_description, \
#     modname, packages, py_modules, \
#     short_desc, version, web, zip_safe

from __pkginfo__ import \
    author, author_email, \
    long_description, \
    modname, packages, py_modules, \
    short_desc, version, web, zip_safe

__import__('pkg_resources')
from setuptools import setup

setup(
    author = author,
    author_email = author_email,
    # classifiers = classifiers,
    description = short_desc,
    # install_requires = install_requires,
    # license = license,
    long_description = long_description,
    py_modules = py_modules,
    name = modname,
    packages = packages,
    test_suite = 'nose.collector',
    url = web,
    setup_requires = ['nose>=1.0'],
    version = version,
    zip_safe = zip_safe)

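The rewritten setup.py now pulls its metadata from __pkginfo__.py and switches from distutils to setuptools, with nose wired in through test_suite and setup_requires. Assuming setuptools and nose are available, it would typically be exercised with commands along these lines (illustrative usage, not documented by this commit):

    python setup.py develop    # editable install for hacking on uncompyle6
    python setup.py test       # runs test_suite = 'nose.collector'
    python setup.py sdist      # build a source distribution
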
@@ -1,4 +1,6 @@
#! python
#!/usr/bin/env python
from __future__ import print_function

"""
compile_tests -- compile test patterns for the decompyle test suite
"""

@@ -20,8 +22,8 @@ for opt, val in opts:
if args:
raise 'This tool does not want any arguments'

print "Using files in dir %s" % src_dir
print "Compiling into dir %s" % work_dir
print("Using files in dir %s" % src_dir)
print("Compiling into dir %s" % work_dir)

tests = {}

@@ -46,6 +48,7 @@ tests['2.3'] = tests['2.2']
tests['2.5'] = tests['2.3']
tests['2.6'] = tests['2.5']
tests['2.7'] = ['mine'] + tests['2.6']
tests['3.4'] = ['mine']
total_tests = len(tests['2.7'])
#tests['2.2'].sort(); print tests['2.2']

@@ -68,7 +71,7 @@ try:
except AttributeError:
version = sys.version[:3]

print 'Compiling test files for Python', version,
print '(%i/%i files)' % (len(tests[version]), total_tests)
print('Compiling test files for Python', version)
print('(%i/%i files)' % (len(tests[version]), total_tests))
compile_for_version(version)
print 'Done.'
print('Done.')

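Most of the hunks above follow one pattern: statement-form print becomes the print() function, with from __future__ import print_function at the top of the script so the same line runs under both Python 2 and Python 3. A minimal illustration of the pattern (not code from the repository):

    # Minimal sketch of the print conversion used throughout this commit.
    from __future__ import print_function   # must come before other statements in Python 2

    version = '2.7'
    count, total = 3, 12
    # Old, Python-2-only form:   print '(%i/%i files)' % (count, total)
    # Portable form used here:
    print('Compiling test files for Python', version)
    print('(%i/%i files)' % (count, total))

One subtlety: the old first print ended with a trailing comma to suppress the newline, so the two messages used to appear on one line; the converted calls print them on separate lines.
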
@@ -1,4 +1,5 @@
#! python
#!/usr/bin/env python
from __future__ import print_function

'''
test_pythonlib -- uncompyle and verify Python libraries

@@ -19,7 +20,7 @@ Step 2: Run the test:
test_pythonlib --mylib --verify # decompile verify 'mylib'
'''

from uncompyle2 import main, verify
from uncompyle6 import main, verify
import getopt, sys
import os, time, shutil
from fnmatch import fnmatch

@@ -44,11 +45,11 @@ test_options = {
#-----

def help():
print 'Usage-Examples:'
print 'test_pythonlib --all # decompile all tests (suite + libs)'
print 'test_pythonlib --all --verify # decomyile all tests and verify results'
print 'test_pythonlib --test # decompile only the testsuite'
print 'test_pythonlib --2.2 --verify # decompile and verify python lib 2.2'
print('Usage-Examples:')
print('test_pythonlib --all # decompile all tests (suite + libs)')
print('test_pythonlib --all --verify # decomyile all tests and verify results')
print('test_pythonlib --test # decompile only the testsuite')
print('test_pythonlib --2.2 --verify # decompile and verify python lib 2.2')

def do_tests(src_dir, patterns, target_dir, start_with=None, do_verify=0):
def visitor(files, dirname, names):

@@ -69,12 +70,12 @@ def do_tests(src_dir, patterns, target_dir, start_with=None, do_verify=0):
try:
start_with = files.index(start_with)
files = files[start_with:]
print '>>> starting with file', files[0]
print('>>> starting with file', files[0])
except ValueError:
pass

print time.ctime()
print 'Working directory: ', src_dir
print(time.ctime())
print('Working directory: ', src_dir)
try:
main(src_dir, target_dir, files, [], do_verify=do_verify)
except (KeyboardInterrupt, OSError):

@@ -1,6 +1,6 @@
import uncompyle2
from uncompyle2 import uncompyle, walker, verify, magics
from uncompyle2.spark import GenericASTTraversal, GenericASTTraversalPruningException
import uncompyle6
from uncompyle6 import uncompyle, walker, verify, magics
from uncompyle6.spark import GenericASTTraversal, GenericASTTraversalPruningException
import sys, inspect, types, cStringIO

from collections import namedtuple

@@ -137,7 +137,7 @@ def uncompyle_find(version, co, find_offset, out=sys.stdout, showasm=0, showast=
# store final output stream for case of error
__real_out = out or sys.stdout
if version == 2.7:
import uncompyle2.scanner27 as scan
import uncompyle6.scanner27 as scan
scanner = scan.Scanner27()
elif version == 2.6:
import scanner26 as scan

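One import in the hunk above, import sys, inspect, types, cStringIO, is still Python-2-only: the cStringIO module no longer exists in Python 3. The commit does not touch it, but a version-guarded form in the style of the other changes here could look like this sketch (an assumption, not repository code):

    # Hypothetical compatibility shim for the cStringIO import:
    # io.StringIO replaces cStringIO on Python 3.
    import sys
    if sys.version_info >= (3, 0):
        from io import StringIO
    else:
        from cStringIO import StringIO
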
@@ -1,3 +1,6 @@
from __future__ import print_function


'''
Copyright (c) 1999 John Aycock
Copyright (c) 2000 by hartmut Goebel <h.goebel@crazy-compilers.com>

@@ -68,9 +71,9 @@ def _load_module(filename):
try:
version = float(magics.versions[magic])
except KeyError:
raise ImportError, "Unknown magic number %s in %s" % (ord(magic[0])+256*ord(magic[1]), filename)
raise ImportError("Unknown magic number %s in %s" % (ord(magic[0])+256*ord(magic[1]), filename))
if (version > 2.7) or (version < 2.5):
raise ImportError, "This is a Python %s file! Only Python 2.5 to 2.7 files are supported." % version
raise ImportError("This is a Python %s file! Only Python 2.5 to 2.7 files are supported." % version)
#print version
fp.read(4) # timestamp
co = dis.marshalLoad(fp)

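The two raise changes above are the Python 3 spelling of the same exception: raise ExceptionClass, "message" is a syntax error on Python 3, while calling the exception class works on both 2 and 3. A minimal before/after sketch (check_version is an illustrative name, not the module's API):

    def check_version(version):
        # Old, Python-2-only spelling (a syntax error on Python 3):
        #     raise ImportError, "This is a Python %s file!" % version
        # Portable spelling used by this commit:
        if version > 2.7 or version < 2.5:
            raise ImportError("This is a Python %s file! Only Python 2.5 to 2.7 files are supported." % version)
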
@@ -105,7 +108,7 @@ def uncompyle(version, co, out=None, showasm=0, showast=0):
walk = walker.Walker(out, scanner, showast=showast)
try:
ast = walk.build_ast(tokens, customize)
except walker.ParserError, e : # parser failed, dump disassembly
except walker.ParserError as e : # parser failed, dump disassembly
print >>__real_out, e
raise
del tokens # save memory

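Two Python-2-isms meet in this hunk: the exception-catching syntax except E, e: becomes except E as e: (the only form Python 3 accepts), while the redirected print >>__real_out, e is left as is and is not yet Python-3-ready; with print_function in effect it would become print(e, file=__real_out). A sketch of both, using a local stand-in for walker.ParserError:

    from __future__ import print_function
    import sys

    class ParserError(Exception):
        pass

    __real_out = sys.stdout
    try:
        raise ParserError("parser failed, dump disassembly")
    except ParserError as e:            # 'except ParserError, e:' only parses on Python 2
        print(e, file=__real_out)       # portable form of 'print >>__real_out, e'
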
@@ -223,9 +226,9 @@ def main(in_base, out_base, files, codes, outfile=None,
if do_verify:
try:
verify.compare_code_with_srcfile(infile, outfile)
if not outfile: print '\n# okay decompyling', infile, __memUsage()
if not outfile: print('\n# okay decompyling', infile, __memUsage())
okay_files += 1
except verify.VerifyCmpError, e:
except verify.VerifyCmpError as e:
verify_failed_files += 1
os.rename(outfile, outfile + '_unverified')
if not outfile:

@@ -233,7 +236,7 @@ def main(in_base, out_base, files, codes, outfile=None,
print >>sys.stderr, e
else:
okay_files += 1
if not outfile: print '\n# okay decompyling', infile, __memUsage()
if not outfile: print('\n# okay decompyling', infile, __memUsage())
if outfile:
sys.stdout.write("decompiled %i files: %i okay, %i failed, %i verify failed\r" % (tot_files, okay_files, failed_files, verify_failed_files))
sys.stdout.flush()

@@ -1,16 +1,20 @@
from __future__ import print_function
import struct

import struct, sys

__all__ = ['magics', 'versions']

def __build_magic(magic):
if (sys.version_info > (3, 0)):
return struct.pack('Hcc', magic, bytes('\r', 'utf-8'), bytes('\n', 'utf-8'))
else:
return struct.pack('Hcc', magic, '\r', '\n')

by_magic = {}
by_version = {}

def __by_version(magics):
for m, v in magics.items():
for m, v in list(magics.items()):
by_magic[m] = v
by_version[v] = m
return by_version

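__build_magic above carries the one genuinely version-dependent change in this file: struct's 'c' format code takes a one-byte bytes object on Python 3 but a length-1 str on Python 2, so the CR/LF pair that terminates a pyc magic number has to be built differently. A standalone sketch of the same idea (the function name and sample value are illustrative):

    import struct, sys

    def build_magic(magic_number):
        # Pack the 16-bit magic followed by b'\r\n', portably across 2 and 3.
        if sys.version_info >= (3, 0):
            return struct.pack('Hcc', magic_number, bytes('\r', 'utf-8'), bytes('\n', 'utf-8'))
        else:
            return struct.pack('Hcc', magic_number, '\r', '\n')

    print(repr(build_magic(62211)))   # 62211 is CPython 2.7's pyc magic number
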
@@ -77,6 +81,7 @@ def test():
magic_20 = magics['2.0']
current = imp.get_magic()
current_version = struct.unpack('HBB', current)[0]
from trepan.api import debug; debug()
magic_current = by_magic[ current ]
print(type(magic_20), len(magic_20), repr(magic_20))
print()

@@ -1,3 +1,5 @@
from __future__ import print_function

'''
Copyright (c) 1999 John Aycock
Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>

@@ -9,8 +11,13 @@
__all__ = ['parse', 'AST', 'ParserError', 'Parser']

from spark import GenericASTBuilder
import string, exceptions, sys
from UserList import UserList
import string, sys

if (sys.version_info > (3, 0)):
intern = sys.intern
from collections import UserList
else:
from UserList import UserList

from scanner import Token

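The hunk above bundles three compatibility moves: the long-gone exceptions module is dropped from the import list, intern (a builtin in Python 2) is aliased from sys.intern on Python 3, and UserList comes from collections on Python 3 instead of the removed UserList module. The same guard pattern, reduced to a self-contained sketch:

    # Version-guarded imports in the style used by this commit.
    from __future__ import print_function
    import sys

    if sys.version_info >= (3, 0):
        intern = sys.intern               # 'intern' stopped being a builtin in Python 3
        from collections import UserList  # the UserList module was removed in Python 3
    else:
        from UserList import UserList     # Python 2 location

    names = UserList(['stmts', 'stmts', 'expr'])
    interned = [intern(str(n)) for n in names]
    print(len(names), len(interned))
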
@@ -47,6 +54,7 @@ class ParserError(Exception):


class Parser(GenericASTBuilder):

def __init__(self):
GenericASTBuilder.__init__(self, AST, 'stmts')
self.customized = {}

@@ -57,7 +65,7 @@ class Parser(GenericASTBuilder):
collector to collect this object.
"""
for dict in (self.rule2func, self.rules, self.rule2name):
for i in dict.keys():
for i in list(dict.keys()):
dict[i] = None
for i in dir(self):
setattr(self, i, None)

@@ -704,7 +712,7 @@ class Parser(GenericASTBuilder):

def __ambiguity(self, children):
# only for debugging! to be removed hG/2000-10-15
print children
print(children)
return GenericASTBuilder.ambiguity(self, children)

def resolve(self, list):

@@ -737,9 +745,9 @@ def parse(tokens, customize):
# expr ::= expr {expr}^n CALL_FUNCTION_KW_n POP_TOP
#
global p
for k, v in customize.items():
for k, v in list(customize.items()):
# avoid adding the same rule twice to this parser
if p.customized.has_key(k):
if k in p.customized:
continue
p.customized[k] = None

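Two dictionary idioms recur here and in the rest of the commit: d.has_key(k) becomes k in d (has_key is gone in Python 3), and iteration over d.items() or d.keys() is wrapped in list(...) so the code keeps working when those methods return views rather than lists. A small sketch of both idioms (illustrative names only):

    # Dictionary idioms that run unchanged on Python 2 and Python 3.
    customized = {'CALL_FUNCTION_3': None}

    key = 'CALL_FUNCTION_3'
    if key in customized:                    # replaces: customized.has_key(key)
        print('rule already added')

    for k, v in list(customized.items()):    # list() keeps iteration safe if the dict changes
        customized[k] = 0
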
@@ -12,7 +12,7 @@ from array import array
from operator import itemgetter
from struct import *

from uncompyle2.opcode.opcode_25 import *
from uncompyle6.opcodes.opcode_25 import *
import disas as dis
import scanner as scan

@@ -12,7 +12,7 @@ from array import array
from operator import itemgetter
from struct import *

from uncompyle2.opcode.opcode_26 import *
from uncompyle6.opcodes.opcode_26 import *
import disas as dis
import scanner as scan

@@ -894,4 +894,3 @@ class Scanner26(scan.Scanner):
label = self.fixed_jumps[i]
targets[label] = targets.get(label, []) + [i]
return targets

@@ -11,7 +11,7 @@ from collections import namedtuple
from array import array
from operator import itemgetter

from uncompyle2.opcode.opcode_27 import *
from uncompyle6.opcodes.opcode_27 import *
import disas as dis
import scanner as scan

@@ -1,3 +1,4 @@
from __future__ import print_function
'''
Copyright (c) 1998-2002 John Aycock

@@ -28,31 +29,31 @@ def _namelist(instance):
for c in classlist:
for b in c.__bases__:
classlist.append(b)
for name in c.__dict__.keys():
if not namedict.has_key(name):
for name in list(c.__dict__.keys()):
if name not in namedict:
namelist.append(name)
namedict[name] = 1
return namelist

#
# Extracted from GenericParser and made global so that [un]picking works.
#
class _State:
'''
Extracted from GenericParser and made global so that [un]picking works.
'''
def __init__(self, stateno, items):
self.T, self.complete, self.items = [], [], items
self.stateno = stateno

class GenericParser:
#
# An Earley parser, as per J. Earley, "An Efficient Context-Free
# Parsing Algorithm", CACM 13(2), pp. 94-102. Also J. C. Earley,
# "An Efficient Context-Free Parsing Algorithm", Ph.D. thesis,
# Carnegie-Mellon University, August 1968. New formulation of
# the parser according to J. Aycock, "Practical Earley Parsing
# and the SPARK Toolkit", Ph.D. thesis, University of Victoria,
# 2001, and J. Aycock and R. N. Horspool, "Practical Earley
# Parsing", unpublished paper, 2001.
#
'''
An Earley parser, as per J. Earley, "An Efficient Context-Free
Parsing Algorithm", CACM 13(2), pp. 94-102. Also J. C. Earley,
"An Efficient Context-Free Parsing Algorithm", Ph.D. thesis,
Carnegie-Mellon University, August 1968. New formulation of
the parser according to J. Aycock, "Practical Earley Parsing
and the SPARK Toolkit", Ph.D. thesis, University of Victoria,
2001, and J. Aycock and R. N. Horspool, "Practical Earley
Parsing", unpublished paper, 2001.
'''

def __init__(self, start):
self.rules = {}

@@ -90,7 +91,7 @@ class GenericParser:
changes = 1
while changes:
changes = 0
for k, v in self.edges.items():
for k, v in list(self.edges.items()):
if v is None:
state, sym = k
if self.states.has_key(state):

@@ -127,12 +128,12 @@ class GenericParser:
rules = doc.split()

index = []
for i in xrange(len(rules)):
for i in range(len(rules)):
if rules[i] == '::=':
index.append(i-1)
index.append(len(rules))

for i in xrange(len(index)-1):
for i in range(len(index)-1):
lhs = rules[index[i]]
rhs = rules[index[i]+2:index[i+1]]
rule = (lhs, tuple(rhs))

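xrange no longer exists in Python 3, so these loops switch to plain range; on Python 2 that builds a short list instead of a lazy iterator, which is harmless for grammar-rule index loops of this size. A minimal sketch of the substitution (illustrative only):

    # range() replaces xrange(); fine here because the sequences are short.
    rules = "stmts ::= stmts stmt".split()
    index = []
    for i in range(len(rules)):
        if rules[i] == '::=':
            index.append(i - 1)
    print(index)   # -> [0]
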
@@ -140,7 +141,7 @@ class GenericParser:
if _preprocess:
rule, fn = self.preprocess(rule, func)

if self.rules.has_key(lhs):
if lhs in self.rules:
self.rules[lhs].append(rule)
else:
self.rules[lhs] = [ rule ]

@@ -163,7 +164,7 @@ class GenericParser:
self.nullable = {}
tbd = []

for rulelist in self.rules.values():
for rulelist in list(self.rules.values()):
lhs = rulelist[0][0]
self.nullable[lhs] = 0
for rule in rulelist:

@@ -178,7 +179,7 @@ class GenericParser:
# grammars.
#
for sym in rhs:
if not self.rules.has_key(sym):
if sym not in self.rules:
break
else:
tbd.append(rule)

@@ -212,7 +213,7 @@ class GenericParser:

def makeNewRules(self):
worklist = []
for rulelist in self.rules.values():
for rulelist in list(self.rules.values()):
for rule in rulelist:
worklist.append((rule, 0, 1, rule))

@@ -221,7 +222,7 @@ class GenericParser:
n = len(rhs)
while i < n:
sym = rhs[i]
if not self.rules.has_key(sym) or \
if sym not in self.rules or \
not self.nullable[sym]:
candidate = 0
i = i + 1

@@ -238,7 +239,7 @@ class GenericParser:
if candidate:
lhs = self._NULLABLE+lhs
rule = (lhs, rhs)
if self.newrules.has_key(lhs):
if lhs in self.newrules:
self.newrules[lhs].append(rule)
else:
self.newrules[lhs] = [ rule ]

@@ -248,7 +249,7 @@ class GenericParser:
return None

def error(self, token):
print "Syntax error at or near `%s' token" % token
print("Syntax error at or near `%s' token" % token)
raise SystemExit

def parse(self, tokens):

@@ -292,7 +293,8 @@ class GenericParser:
#
return self._NULLABLE == sym[0:len(self._NULLABLE)]

def skip(self, (lhs, rhs), pos=0):
def skip(self, xxx_todo_changeme, pos=0):
(lhs, rhs) = xxx_todo_changeme
n = len(rhs)
while pos < n:
if not self.isnullable(rhs[pos]):

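The skip() change is the mechanical fix 2to3 applies for tuple parameters, which Python 3 removed (PEP 3113): def skip(self, (lhs, rhs), pos=0) is a syntax error there, so the tuple becomes a single argument unpacked on the first line of the body. The xxx_todo_changeme name is 2to3's placeholder; a hand-written version would normally pick a meaningful name, roughly like this sketch:

    # Tuple parameters were removed in Python 3 (PEP 3113).
    # Python 2 only:   def skip(self, (lhs, rhs), pos=0): ...
    def skip(rule, pos=0):
        lhs, rhs = rule          # unpack inside the body instead
        return lhs, rhs[pos:]

    print(skip(('stmts', ('stmts', 'stmt')), 1))   # -> ('stmts', ('stmt',))
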
@@ -551,12 +553,12 @@ class GenericParser:
rule = self.ambiguity(self.newrules[nt])
else:
rule = self.newrules[nt][0]
#print rule
# print(rule)

rhs = rule[1]
attr = [None] * len(rhs)

for i in xrange(len(rhs)-1, -1, -1):
for i in range(len(rhs)-1, -1, -1):
attr[i] = self.deriveEpsilon(rhs[i])
return self.rule2func[self.new2old[rule]](attr)

@@ -570,12 +572,12 @@ class GenericParser:
rule = choices[0]
if len(choices) > 1:
rule = self.ambiguity(choices)
#print rule
# print(rule)

rhs = rule[1]
attr = [None] * len(rhs)

for i in xrange(len(rhs)-1, -1, -1):
for i in range(len(rhs)-1, -1, -1):
sym = rhs[i]
if not self.newrules.has_key(sym):
if sym != self._BOF:

@@ -599,24 +601,23 @@ class GenericParser:
# appears in >1 method. Also undefined results if rules
# causing the ambiguity appear in the same method.
#

sortlist = []
name2index = {}
for i in xrange(len(rules)):
for i in range(len(rules)):
lhs, rhs = rule = rules[i]
name = self.rule2name[self.new2old[rule]]
sortlist.append((len(rhs), name))
name2index[name] = i
sortlist.sort()
list = map(lambda (a,b): b, sortlist)
list = [a_b[1] for a_b in sortlist]
return rules[name2index[self.resolve(list)]]

def resolve(self, list):
#
# Resolve ambiguity in favor of the shortest RHS.
# Since we walk the tree from the top down, this
# should effectively resolve in favor of a "shift".
#
'''
Resolve ambiguity in favor of the shortest RHS.
Since we walk the tree from the top down, this
should effectively resolve in favor of a "shift".
'''
return list[0]

#

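The ambiguity() change removes another Python-2-only construct: lambdas can no longer unpack a tuple argument, so map(lambda (a, b): b, sortlist) is replaced by a list comprehension, which also guarantees a real list on Python 3, where map() returns an iterator. The same transformation in isolation (names are illustrative):

    # Python 2 only:   names = map(lambda (length, name): name, sortlist)
    sortlist = [(1, 'expr ::= LOAD_CONST'), (2, 'stmt ::= expr POP_TOP')]
    sortlist.sort()
    names = [length_name[1] for length_name in sortlist]   # portable
    print(names)   # -> ['expr ::= LOAD_CONST', 'stmt ::= expr POP_TOP']
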
@@ -645,28 +646,29 @@ class GenericASTBuilder(GenericParser):
if isinstance(arg, self.AST):
children.append(arg)
else:
children.append(arg)
children.append(self.terminal(arg))
return self.nonterminal(lhs, children)

def terminal(self, token): return token

def nonterminal(self, type, args):
rv = self.AST(type)
rv[:len(args)] = args
return rv

#
# GenericASTTraversal is a Visitor pattern according to Design Patterns. For
# each node it attempts to invoke the method n_<node type>, falling
# back onto the default() method if the n_* can't be found. The preorder
# traversal also looks for an exit hook named n_<node type>_exit (no default
# routine is called if it's not found). To prematurely halt traversal
# of a subtree, call the prune() method -- this only makes sense for a
# preorder traversal. Node type is determined via the typestring() method.
#

class GenericASTTraversalPruningException:
pass

class GenericASTTraversal:
'''
GenericASTTraversal is a Visitor pattern according to Design Patterns. For
each node it attempts to invoke the method n_<node type>, falling
back onto the default() method if the n_* can't be found. The preorder
traversal also looks for an exit hook named n_<node type>_exit (no default
routine is called if it's not found). To prematurely halt traversal
of a subtree, call the prune() method -- this only makes sense for a
preorder traversal. Node type is determined via the typestring() method.
'''
def __init__(self, ast):
self.ast = ast

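The docstring moved onto GenericASTTraversal describes a dynamic-dispatch visitor: for every node the traversal looks up a method named n_<node type>, falls back to default() when none exists, and may call an n_<node type>_exit hook afterwards. A stripped-down sketch of that dispatch mechanism (not the class from spark.py, just the idea it documents):

    # Minimal visitor dispatch in the style the docstring describes.
    from __future__ import print_function

    class TinyTraversal(object):
        def dispatch(self, node_type, node):
            handler = getattr(self, 'n_' + node_type, self.default)
            handler(node)
            exit_hook = getattr(self, 'n_' + node_type + '_exit', None)
            if exit_hook is not None:
                exit_hook(node)

        def default(self, node):
            print('no handler for', node)

        def n_expr(self, node):
            print('visiting expr node', node)

    t = TinyTraversal()
    t.dispatch('expr', 1)    # uses n_expr
    t.dispatch('stmt', 2)    # falls back to default()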