diff --git a/uncompyle6/parser.py b/uncompyle6/parser.py
index 1514acdd..545a0204 100644
--- a/uncompyle6/parser.py
+++ b/uncompyle6/parser.py
@@ -41,8 +41,8 @@ nop_func = lambda self, args: None
 
 class PythonParser(GenericASTBuilder):
 
-    def __init__(self, AST, start, debug):
-        super(PythonParser, self).__init__(AST, start, debug)
+    def __init__(self, SyntaxTree, start, debug):
+        super(PythonParser, self).__init__(SyntaxTree, start, debug)
         # FIXME: customize per python parser version
         nt_list = [
             'stmts', 'except_stmts', '_stmts', 'attributes',
diff --git a/uncompyle6/parsers/parse2.py b/uncompyle6/parsers/parse2.py
index 9a67aa16..33985ac7 100644
--- a/uncompyle6/parsers/parse2.py
+++ b/uncompyle6/parsers/parse2.py
@@ -28,13 +28,13 @@ that a later phase can turn into a sequence of ASCII text.
 from __future__ import print_function
 
 from uncompyle6.parser import PythonParser, PythonParserSingle, nop_func
-from uncompyle6.parsers.astnode import AST
+from uncompyle6.parsers.treenode import SyntaxTree
 from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
 
 class Python2Parser(PythonParser):
 
     def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
-        super(Python2Parser, self).__init__(AST, 'stmts', debug=debug_parser)
+        super(Python2Parser, self).__init__(SyntaxTree, 'stmts', debug=debug_parser)
         self.new_rules = set()
 
     def p_print2(self, args):
diff --git a/uncompyle6/parsers/parse3.py b/uncompyle6/parsers/parse3.py
index 884d3e1f..941743ea 100644
--- a/uncompyle6/parsers/parse3.py
+++ b/uncompyle6/parsers/parse3.py
@@ -27,7 +27,7 @@ that a later phase can turn into a sequence of ASCII text.
 """
 
 from uncompyle6.parser import PythonParser, PythonParserSingle, nop_func
-from uncompyle6.parsers.astnode import AST
+from uncompyle6.parsers.treenode import SyntaxTree
 from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
 from xdis import PYTHON3
 
@@ -35,7 +35,7 @@ class Python3Parser(PythonParser):
 
     def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
         self.added_rules = set()
-        super(Python3Parser, self).__init__(AST, 'stmts', debug=debug_parser)
+        super(Python3Parser, self).__init__(SyntaxTree, 'stmts', debug=debug_parser)
         self.new_rules = set()
 
     def p_comprehension3(self, args):
diff --git a/uncompyle6/parsers/astnode.py b/uncompyle6/parsers/treenode.py
similarity index 89%
rename from uncompyle6/parsers/astnode.py
rename to uncompyle6/parsers/treenode.py
index c46eb277..f08d42c9 100644
--- a/uncompyle6/parsers/astnode.py
+++ b/uncompyle6/parsers/treenode.py
@@ -6,10 +6,10 @@ from spark_parser.ast import AST as spark_AST
 if PYTHON3:
     intern = sys.intern
 
-class AST(spark_AST):
+class SyntaxTree(spark_AST):
     def isNone(self):
-        """An AST None token. We can't use regular list comparisons
-        because AST token offsets might be different"""
+        """A SyntaxTree None token. We can't use regular list comparisons
+        because SyntaxTree token offsets might be different"""
         return len(self.data) == 1 and NoneToken == self.data[0]
 
     def __repr__(self):
diff --git a/uncompyle6/semantics/consts.py b/uncompyle6/semantics/consts.py
index 44e5b869..51b4be25 100644
--- a/uncompyle6/semantics/consts.py
+++ b/uncompyle6/semantics/consts.py
@@ -15,7 +15,7 @@
 """Constants and initial table values used in pysource.py and fragments.py"""
 
 import re, sys
-from uncompyle6.parsers.astnode import AST
+from uncompyle6.parsers.treenode import SyntaxTree
 from uncompyle6 import PYTHON3
 from uncompyle6.scanners.tok import Token, NoneToken
@@ -32,33 +32,33 @@ LINE_LENGTH = 80
 
 # Some parse trees created below are used for comparing code
 # fragments (like 'return None' at the end of functions).
 
-RETURN_LOCALS = AST('return',
-                [ AST('ret_expr', [AST('expr', [ Token('LOAD_LOCALS') ])]),
+RETURN_LOCALS = SyntaxTree('return',
+                [ SyntaxTree('ret_expr', [SyntaxTree('expr', [ Token('LOAD_LOCALS') ])]),
                   Token('RETURN_VALUE')])
 
-NONE = AST('expr', [ NoneToken ] )
+NONE = SyntaxTree('expr', [ NoneToken ] )
 
-RETURN_NONE = AST('stmt',
-                  [ AST('return',
+RETURN_NONE = SyntaxTree('stmt',
+                  [ SyntaxTree('return',
                         [ NONE, Token('RETURN_VALUE')]) ])
 
-PASS = AST('stmts',
-           [ AST('sstmt',
-                 [ AST('stmt',
-                       [ AST('pass', [])])])])
+PASS = SyntaxTree('stmts',
+           [ SyntaxTree('sstmt',
+                 [ SyntaxTree('stmt',
+                       [ SyntaxTree('pass', [])])])])
 
 ASSIGN_DOC_STRING = lambda doc_string: \
-  AST('stmt',
-      [ AST('assign',
-            [ AST('expr', [ Token('LOAD_CONST', pattr=doc_string) ]),
-              AST('store', [ Token('STORE_NAME', pattr='__doc__')])
+  SyntaxTree('stmt',
+      [ SyntaxTree('assign',
+            [ SyntaxTree('expr', [ Token('LOAD_CONST', pattr=doc_string) ]),
+              SyntaxTree('store', [ Token('STORE_NAME', pattr='__doc__')])
        ])])
 
-NAME_MODULE = AST('stmt',
-            [ AST('assign',
-                [ AST('expr',
+NAME_MODULE = SyntaxTree('stmt',
+            [ SyntaxTree('assign',
+                [ SyntaxTree('expr',
                     [Token('LOAD_NAME', pattr='__name__', offset=0, has_arg=True)]),
-                  AST('store',
+                  SyntaxTree('store',
                     [ Token('STORE_NAME', pattr='__module__', offset=3, has_arg=True)])
                 ])])
@@ -392,7 +392,7 @@ PRECEDENCE = {
 }
 
 ASSIGN_TUPLE_PARAM = lambda param_name: \
-  AST('expr', [ Token('LOAD_FAST', pattr=param_name) ])
+  SyntaxTree('expr', [ Token('LOAD_FAST', pattr=param_name) ])
 
 escape = re.compile(r'''
         (?P<prefix> [^%]* )
diff --git a/uncompyle6/semantics/customize.py b/uncompyle6/semantics/customize.py
index 28ec59f6..c54c2f6d 100644
--- a/uncompyle6/semantics/customize.py
+++ b/uncompyle6/semantics/customize.py
@@ -19,7 +19,7 @@ from uncompyle6.semantics.consts import (
     TABLE_R, TABLE_DIRECT)
 
-from uncompyle6.parsers.astnode import AST
+from uncompyle6.parsers.treenode import SyntaxTree
 from uncompyle6.scanners.tok import Token
 
 def customize_for_version(self, is_pypy, version):
@@ -104,12 +104,12 @@ def customize_for_version(self, is_pypy, version):
             })
 
         global NAME_MODULE
-        NAME_MODULE = AST('stmt',
-                      [ AST('assign',
-                          [ AST('expr',
+        NAME_MODULE = SyntaxTree('stmt',
+                      [ SyntaxTree('assign',
+                          [ SyntaxTree('expr',
                               [Token('LOAD_GLOBAL', pattr='__name__', offset=0, has_arg=True)]),
-                            AST('store',
+                            SyntaxTree('store',
                               [ Token('STORE_NAME', pattr='__module__', offset=3, has_arg=True)])
                           ])])
diff --git a/uncompyle6/semantics/fragments.py b/uncompyle6/semantics/fragments.py
index c3943c77..f4258bb7 100644
--- a/uncompyle6/semantics/fragments.py
+++ b/uncompyle6/semantics/fragments.py
@@ -81,7 +81,7 @@ from uncompyle6.show import (
     maybe_show_tree,
 )
 
-from uncompyle6.parsers.astnode import AST
+from uncompyle6.parsers.treenode import SyntaxTree
 from uncompyle6.semantics.pysource import (
     ParserError, StringIO)
@@ -284,7 +284,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
         else:
             start = len(self.f.getvalue()) + len(self.indent)
             self.write(self.indent, 'return')
-            if self.return_none or node != AST('return', [AST('ret_expr', [NONE]),
+            if self.return_none or node != SyntaxTree('return', [SyntaxTree('ret_expr', [NONE]),
                                                           Token('RETURN_VALUE')]):
                 self.write(' ')
             self.last_finish = len(self.f.getvalue())
@@ -311,7 +311,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
         else:
             start = len(self.f.getvalue()) + len(self.indent)
             self.write(self.indent, 'return')
-            if self.return_none or node != AST('return', [AST('ret_expr', [NONE]), Token('RETURN_END_IF')]):
+            if self.return_none or node != SyntaxTree('return', [SyntaxTree('ret_expr', [NONE]), Token('RETURN_END_IF')]):
                 self.write(' ')
                 self.preorder(node[0])
         if hasattr(node[-1], 'offset'):
@@ -326,7 +326,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
             super(FragmentsWalker, self).n_yield(node)
         except GenericASTTraversalPruningException:
             pass
-        if node != AST('yield', [NONE, Token('YIELD_VALUE')]):
+        if node != SyntaxTree('yield', [NONE, Token('YIELD_VALUE')]):
             node[0].parent = node
         self.set_pos_info(node[-1], start, len(self.f.getvalue()))
         self.set_pos_info(node, start, len(self.f.getvalue()))
@@ -1318,7 +1318,7 @@ class FragmentsWalker(pysource.SourceWalker, object):
         else:
             nodeInfo = nodeInfo
 
-        if isinstance(nodeInfo, AST):
+        if isinstance(nodeInfo, SyntaxTree):
             nonterminal = nodeInfo[0]
         else:
             nonterminal = nodeInfo.node
@@ -1816,7 +1816,7 @@ def code_deparse(co, out=StringIO(), version=None, is_pypy=None,
                                co, version))
 
     # Just when you think we've forgotten about what we
-    # were supposed to to: Generate source from AST!
+    # were supposed to do: Generate source from the Syntax Tree!
     deparsed.gen_source(deparsed.ast, co.co_name, customize)
 
     deparsed.set_pos_info(deparsed.ast, 0, len(deparsed.text))
diff --git a/uncompyle6/semantics/helper.py b/uncompyle6/semantics/helper.py
index 297eede4..f5e10374 100644
--- a/uncompyle6/semantics/helper.py
+++ b/uncompyle6/semantics/helper.py
@@ -1,6 +1,6 @@
 import sys
 
-from uncompyle6.parsers.astnode import AST
+from uncompyle6.parsers.treenode import SyntaxTree
 from uncompyle6 import PYTHON3
 
 if PYTHON3:
@@ -21,7 +21,7 @@ nonglobal_ops = frozenset(('STORE_DEREF', 'DELETE_DEREF'))
 def find_all_globals(node, globs):
     """Search Syntax Tree node to find variable names that are global."""
     for n in node:
-        if isinstance(n, AST):
+        if isinstance(n, SyntaxTree):
             globs = find_all_globals(n, globs)
         elif n.kind in read_write_global_ops:
             globs.add(n.pattr)
@@ -31,7 +31,7 @@ def find_globals_and_nonlocals(node, globs, nonlocals, code, version):
     """search a node of parse tree to find variable names that need a
     either 'global' or 'nonlocal' statements added."""
     for n in node:
-        if isinstance(n, AST):
+        if isinstance(n, SyntaxTree):
             globs, nonlocals = find_globals_and_nonlocals(n, globs,
                                                           nonlocals, code, version)
         elif n.kind in read_global_ops:
@@ -48,7 +48,7 @@ def find_globals_and_nonlocals(node, globs, nonlocals, code, version):
 #     """Find globals in this statement."""
 #     for n in node:
 #         # print("XXX", n.kind, global_ops)
-#         if isinstance(n, AST):
+#         if isinstance(n, SyntaxTree):
 #             # FIXME: do I need a caser for n.kind="mkfunc"?
 #             if n.kind in ("conditional_lambda", "return_lambda"):
 #                 globs = find_globals(n, globs, mklambda_globals)
@@ -60,7 +60,7 @@ def find_globals_and_nonlocals(node, globs, nonlocals, code, version):
 
 def find_none(node):
     for n in node:
-        if isinstance(n, AST):
+        if isinstance(n, SyntaxTree):
             if n not in ('return_stmt', 'return_if_stmt'):
                 if find_none(n): return True
diff --git a/uncompyle6/semantics/make_function.py b/uncompyle6/semantics/make_function.py
index 3eb394be..7ccd48e1 100644
--- a/uncompyle6/semantics/make_function.py
+++ b/uncompyle6/semantics/make_function.py
@@ -18,7 +18,7 @@ All the crazy things we have to do to handle Python functions
 """
 from xdis.code import iscode, code_has_star_arg, code_has_star_star_arg
 from uncompyle6.scanner import Code
-from uncompyle6.parsers.astnode import AST
+from uncompyle6.parsers.treenode import SyntaxTree
 from uncompyle6 import PYTHON3
 from uncompyle6.semantics.parser_error import ParserError
 from uncompyle6.parser import ParserError as ParserError2
@@ -173,7 +173,7 @@ def make_function3_annotate(self, node, is_lambda, nested=1,
                 if isinstance(aa, tuple):
                     aa = aa[0]
                     self.write(': "%s"' % aa)
-                elif isinstance(aa, AST):
+                elif isinstance(aa, SyntaxTree):
                     self.write(': ')
                     self.preorder(aa)
diff --git a/uncompyle6/semantics/pysource.py b/uncompyle6/semantics/pysource.py
index 860414ad..88f697f4 100644
--- a/uncompyle6/semantics/pysource.py
+++ b/uncompyle6/semantics/pysource.py
@@ -131,7 +131,7 @@ from xdis.code import iscode
 from xdis.util import COMPILER_FLAG_BIT
 
 from uncompyle6.parser import get_python_parser
-from uncompyle6.parsers.astnode import AST
+from uncompyle6.parsers.treenode import SyntaxTree
 from spark_parser import GenericASTTraversal, DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
 from uncompyle6.scanner import Code, get_scanner
 import uncompyle6.parser as python_parser
@@ -182,28 +182,28 @@ class SourceWalker(GenericASTTraversal, object):
                  debug_parser=PARSER_DEFAULT_DEBUG, compile_mode='exec',
                  is_pypy=IS_PYPY, linestarts={}, tolerate_errors=False):
-        """version is the Python version (a float) of the Python dialect
+        """`version' is the Python version (a float) of the Python dialect
+        of both the syntax tree and language we should produce.
-        of both the AST and language we should produce.
-
-        out is IO-like file pointer to where the output should go. It
+        `out' is IO-like file pointer to where the output should go. It
         whould have a getvalue() method.
 
-        scanner is a method to call when we need to scan tokens. Sometimes
+        `scanner' is a method to call when we need to scan tokens. Sometimes
         in producing output we will run across further tokens that need to
         be scaned.
 
-        If showast is True, we print the AST tree.
+        If `showast' is True, we print the syntax tree.
 
-        compile_mode is is either 'exec' or 'single'. It isthe compile
-        mode that was used to create the AST and specifies a gramar variant within
-        a Python version to use.
+        `compile_mode' is either 'exec' or 'single'. It is the compile
+        mode that was used to create the Syntax Tree and specifies a
+        grammar variant within a Python version to use.
 
-        is_pypy should be True if the AST was generated for PyPy.
+        `is_pypy' should be True if the Syntax Tree was generated for PyPy.
 
-        linestarts is a dictionary of line number to bytecode offset. This
+        `linestarts' is a dictionary of line number to bytecode offset. This
         can sometimes assist in determinte which kind of source-code construct
         to use when there is ambiguity.
+ """ GenericASTTraversal.__init__(self, ast=None) self.scanner = scanner @@ -408,11 +408,11 @@ class SourceWalker(GenericASTTraversal, object): if self.version <= 2.6: return ret else: - # FIXME: should the AST expression be folded into + # FIXME: should the SyntaxTree expression be folded into # the global RETURN_NONE constant? return (ret or - node == AST('return', - [AST('ret_expr', [NONE]), Token('RETURN_VALUE')])) + node == SyntaxTree('return', + [SyntaxTree('ret_expr', [NONE]), Token('RETURN_VALUE')])) # Python 3.x can have be dead code as a result of its optimization? # So we'll add a # at the end of the return lambda so the rest is ignored @@ -457,7 +457,7 @@ class SourceWalker(GenericASTTraversal, object): self.prune() # stop recursing def n_yield(self, node): - if node != AST('yield', [NONE, Token('YIELD_VALUE')]): + if node != SyntaxTree('yield', [NONE, Token('YIELD_VALUE')]): self.template_engine(( 'yield %c', 0), node) elif self.version <= 2.4: # Early versions of Python don't allow a plain "yield" @@ -2087,10 +2087,10 @@ class SourceWalker(GenericASTTraversal, object): if self.version < 3.0: # Should we ditch this in favor of the "else" case? qualname = '.'.join(self.classes) - QUAL_NAME = AST('stmt', - [ AST('assign', - [ AST('expr', [Token('LOAD_CONST', pattr=qualname)]), - AST('store', [ Token('STORE_NAME', pattr='__qualname__')]) + QUAL_NAME = SyntaxTree('stmt', + [ SyntaxTree('assign', + [ SyntaxTree('expr', [Token('LOAD_CONST', pattr=qualname)]), + SyntaxTree('store', [ Token('STORE_NAME', pattr='__qualname__')]) ])]) have_qualname = (ast[0][0] == QUAL_NAME) else: @@ -2151,7 +2151,7 @@ class SourceWalker(GenericASTTraversal, object): self.classes.pop(-1) def gen_source(self, ast, name, customize, is_lambda=False, returnNone=False): - """convert AST to Python source code""" + """convert SyntaxTree to Python source code""" rn = self.return_none self.return_none = returnNone