diff --git a/uncompyle6/semantics/customize3.py b/uncompyle6/semantics/customize3.py
index 0bd5c067..dae70e04 100644
--- a/uncompyle6/semantics/customize3.py
+++ b/uncompyle6/semantics/customize3.py
@@ -20,7 +20,11 @@ from uncompyle6.semantics.consts import TABLE_DIRECT
from xdis.code import iscode
from uncompyle6.scanner import Code
-from uncompyle6.semantics.helper import gen_function_parens_adjust
+from uncompyle6.semantics.helper import (
+ find_code_node,
+ gen_function_parens_adjust,
+)
+
from uncompyle6.semantics.make_function3 import make_function3_annotate
from uncompyle6.semantics.customize35 import customize_for_version35
from uncompyle6.semantics.customize36 import customize_for_version36
@@ -158,6 +162,7 @@ def customize_for_version3(self, version):
self.listcomp_closure3 = listcomp_closure3
def n_classdef3(node):
+
# class definition ('class X(A,B,C):')
cclass = self.currentclass
@@ -228,10 +233,10 @@ def customize_for_version3(self, version):
# Python 3.3 classes with closures work like this.
# Note have to test before 3.2 case because
# index -2 also has an attr.
- subclass_code = load_closure[-3].attr
+ subclass_code = find_code_node(load_closure, -3).attr
elif hasattr(load_closure[-2], "attr"):
# Python 3.2 works like this
- subclass_code = load_closure[-2].attr
+ subclass_code = find_code_node(load_closure, -2).attr
else:
raise "Internal Error n_classdef: cannot find class body"
if hasattr(build_class[3], "__len__"):
diff --git a/uncompyle6/semantics/helper.py b/uncompyle6/semantics/helper.py
index 46d34e1d..e14d88f4 100644
--- a/uncompyle6/semantics/helper.py
+++ b/uncompyle6/semantics/helper.py
@@ -1,5 +1,6 @@
import sys
+from xdis.code import iscode
from uncompyle6.parsers.treenode import SyntaxTree
from uncompyle6 import PYTHON3
@@ -173,6 +174,15 @@ def print_docstring(self, indent, docstring):
self.println(lines[-1], quote)
return True
+def find_code_node(node, start):
+ for i in range(-start, len(node) + 1):
+ if node[-i].kind == "LOAD_CODE":
+ code_node = node[-i]
+ assert iscode(code_node.attr)
+ return code_node
+ pass
+ assert False, "did not find code node starting at %d in %s" % (start, node)
+
def flatten_list(node):
"""
diff --git a/uncompyle6/semantics/make_function2.py b/uncompyle6/semantics/make_function2.py
index 98b707b9..5b10fbbd 100644
--- a/uncompyle6/semantics/make_function2.py
+++ b/uncompyle6/semantics/make_function2.py
@@ -14,7 +14,8 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
-All the crazy things we have to do to handle Python functions
+All the crazy things we have to do to handle Python functions in versions before 3.0.
+The saga of changes continues for 3.0 and above in other files.
"""
from xdis.code import iscode, code_has_star_arg, code_has_star_star_arg
from uncompyle6.scanner import Code
diff --git a/uncompyle6/semantics/make_function3.py b/uncompyle6/semantics/make_function3.py
index 708f04a9..4746634a 100644
--- a/uncompyle6/semantics/make_function3.py
+++ b/uncompyle6/semantics/make_function3.py
@@ -1,5 +1,4 @@
# Copyright (c) 2015-2019 by Rocky Bernstein
-# Copyright (c) 2000-2002 by hartmut Goebel
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -14,7 +13,8 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
-All the crazy things we have to do to handle Python functions
+All the crazy things we have to do to handle Python functions in 3.0-3.5 or so.
+The saga of changes before and after is in other files.
"""
from xdis.code import iscode, code_has_star_arg, code_has_star_star_arg
from uncompyle6.scanner import Code
diff --git a/uncompyle6/semantics/make_function36.py b/uncompyle6/semantics/make_function36.py
new file mode 100644
index 00000000..97d3baa4
--- /dev/null
+++ b/uncompyle6/semantics/make_function36.py
@@ -0,0 +1,398 @@
+# Copyright (c) 2019 by Rocky Bernstein
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
+All the crazy things we have to do to handle Python functions in 3.6 and above.
+The saga of changes before 3.6 is in other files.
+"""
+from xdis.code import iscode, code_has_star_arg, code_has_star_star_arg
+from uncompyle6.scanner import Code
+from uncompyle6.parsers.treenode import SyntaxTree
+from uncompyle6.semantics.parser_error import ParserError
+from uncompyle6.parser import ParserError as ParserError2
+from uncompyle6 import PYTHON3
+from uncompyle6.semantics.helper import (
+ print_docstring,
+ find_all_globals,
+ find_globals_and_nonlocals,
+ find_none,
+)
+
+if PYTHON3:
+ from itertools import zip_longest
+else:
+ from itertools import izip_longest as zip_longest
+
+from uncompyle6.show import maybe_show_tree_param_default
+
+
+def make_function36(self, node, is_lambda, nested=1, code_node=None):
+ """Dump function definition, doc string, and function body in
+ Python version 3.6 and above.
+ """
+ # MAKE_CLOSURE adds an additional closure slot
+
+    # In Python 3.6 and above the stack layout changes again. I understand
+    # 3.7 revises some of those changes, although I don't
+    # see that reflected in this code yet. Yes, it is hard to follow,
+    # and I am sure I haven't been able to keep up.
+
+ # Thank you, Python.
+
+ def build_param(ast, name, default, annotation=None):
+ """build parameters:
+ - handle defaults
+        - handle annotations
+ """
+ value = default
+ maybe_show_tree_param_default(self.showast, name, value)
+ if annotation:
+ result = "%s: %s=%s" % (name, annotation, value)
+ else:
+ result = "%s=%s" % (name, value)
+
+        # The below can probably be removed. It is probably a holdover
+        # from days when the LOAD_CONST handling erroneously dropped a
+        # default value of None.
+ if result[-2:] == "= ": # default was 'LOAD_CONST None'
+ result += "None"
+
+ return result
+
+ # MAKE_FUNCTION_... or MAKE_CLOSURE_...
+ assert node[-1].kind.startswith("MAKE_")
+
+    # Python 3.3+ adds a qualified name at TOS (-1),
+    # pushing the LOAD_LAMBDA instruction down to position -3.
+ lambda_index = -3
+
+ args_node = node[-1]
+
+ annotate_dict = {}
+
+ # Get a list of tree nodes that constitute the values for the "default
+ # parameters"; these are default values that appear before any *, and are
+ # not to be confused with keyword parameters which may appear after *.
+ args_attr = args_node.attr
+
+    if isinstance(args_attr, (tuple, list)):
+        if len(args_attr) == 3:
+            pos_args, kw_args, annotate_argc = args_attr
+            # No closure entry in the 3-tuple form; define the name so the
+            # "if closure:" test below cannot hit an unbound local.
+            closure = None
+ else:
+ pos_args, kw_args, annotate_argc, closure = args_attr
+
+ i = -4
+ kw_pairs = 0
+ if closure:
+ # FIXME: fill in
+ i -= 1
+ if annotate_argc:
+ # Turn into subroutine and DRY with other use
+ annotate_node = node[i]
+ if annotate_node == "expr":
+ annotate_node = annotate_node[0]
+ annotate_name_node = annotate_node[-1]
+ if annotate_node == "dict" and annotate_name_node.kind.startswith(
+ "BUILD_CONST_KEY_MAP"
+ ):
+ types = [
+ self.traverse(n, indent="") for n in annotate_node[:-2]
+ ]
+ names = annotate_node[-2].attr
+                assert len(types) == len(names)
+                # Use distinct loop names here so the enclosing node
+                # index "i" is not clobbered.
+                for param_name, param_type in zip(names, types):
+                    annotate_dict[param_name] = param_type
+ pass
+ pass
+ i -= 1
+ if kw_args:
+ kw_node = node[i]
+ if kw_node == "expr":
+ kw_node = kw_node[0]
+ if kw_node == "dict":
+ kw_pairs = kw_node[-1].attr
+
+ defparams = []
+ # FIXME: DRY with code below
+ default, kw_args, annotate_argc = args_node.attr[0:3]
+ if default:
+ expr_node = node[0]
+ if node[0] == "pos_arg":
+ expr_node = expr_node[0]
+ assert expr_node == "expr", "expecting mkfunc default node to be an expr"
+ if expr_node[0] == "LOAD_CONST" and isinstance(expr_node[0].attr, tuple):
+ defparams = [repr(a) for a in expr_node[0].attr]
+ elif expr_node[0] in frozenset(("list", "tuple", "dict", "set")):
+ defparams = [self.traverse(n, indent="") for n in expr_node[0][:-1]]
+ else:
+ defparams = []
+ pass
+ else:
+        default, kw_args, annotate_argc, closure = args_node.attr
+ if default:
+ expr_node = node[0]
+ if node[0] == "pos_arg":
+ expr_node = expr_node[0]
+ assert expr_node == "expr", "expecting mkfunc default node to be an expr"
+ if expr_node[0] == "LOAD_CONST" and isinstance(expr_node[0].attr, tuple):
+ defparams = [repr(a) for a in expr_node[0].attr]
+ elif expr_node[0] in frozenset(("list", "tuple", "dict", "set")):
+ defparams = [self.traverse(n, indent="") for n in expr_node[0][:-1]]
+ else:
+ defparams = []
+
+ i = -4
+ kw_pairs = 0
+ if closure:
+ # FIXME: fill in
+ # annotate = node[i]
+ i -= 1
+ if annotate_argc:
+ # Turn into subroutine and DRY with other use
+ annotate_node = node[i]
+ if annotate_node == "expr":
+ annotate_node = annotate_node[0]
+ annotate_name_node = annotate_node[-1]
+ if annotate_node == "dict" and annotate_name_node.kind.startswith(
+ "BUILD_CONST_KEY_MAP"
+ ):
+ types = [self.traverse(n, indent="") for n in annotate_node[:-2]]
+ names = annotate_node[-2].attr
+                assert len(types) == len(names)
+                # Use distinct loop names here so the enclosing node
+                # index "i" is not clobbered.
+                for param_name, param_type in zip(names, types):
+                    annotate_dict[param_name] = param_type
+ pass
+ pass
+ i -= 1
+ if kw_args:
+ kw_node = node[i]
+ if kw_node == "expr":
+ kw_node = kw_node[0]
+ if kw_node == "dict":
+ kw_pairs = kw_node[-1].attr
+ pass
+
+ if lambda_index and is_lambda and iscode(node[lambda_index].attr):
+ assert node[lambda_index].kind == "LOAD_LAMBDA"
+ code = node[lambda_index].attr
+ else:
+ code = code_node.attr
+
+ assert iscode(code)
+ scanner_code = Code(code, self.scanner, self.currentclass)
+
+    # add default values to parameter names
+ argc = code.co_argcount
+ kwonlyargcount = code.co_kwonlyargcount
+
+ paramnames = list(scanner_code.co_varnames[:argc])
+ kwargs = list(scanner_code.co_varnames[argc : argc + kwonlyargcount])
+
+ # defaults are for last n parameters when not in a lambda, thus reverse
+ if not is_lambda:
+ paramnames.reverse()
+ defparams.reverse()
+
+ try:
+ ast = self.build_ast(
+ scanner_code._tokens,
+ scanner_code._customize,
+ is_lambda=is_lambda,
+ noneInNames=("None" in code.co_names),
+ )
+ except (ParserError, ParserError2) as p:
+ self.write(str(p))
+ if not self.tolerate_errors:
+ self.ERROR = p
+ return
+
+ i = len(paramnames) - len(defparams)
+
+ # build parameters
+ params = []
+ if defparams:
+ for i, defparam in enumerate(defparams):
+ params.append(
+ build_param(
+ ast, paramnames[i], defparam, annotate_dict.get(paramnames[i])
+ )
+ )
+
+ for param in paramnames[i + 1 :]:
+ if param in annotate_dict:
+ params.append("%s: %s" % (param, annotate_dict[param]))
+ else:
+ params.append(param)
+ else:
+ for param in paramnames:
+ if param in annotate_dict:
+ params.append("%s: %s" % (param, annotate_dict[param]))
+ else:
+ params.append(param)
+
+ params.reverse() # back to correct order
+
+ if code_has_star_arg(code):
+ star_arg = code.co_varnames[argc + kwonlyargcount]
+ if star_arg in annotate_dict:
+ params.append("*%s: %s" % (star_arg, annotate_dict[star_arg]))
+ else:
+ params.append("*%s" % star_arg)
+
+ argc += 1
+
+ # dump parameter list (with default values)
+ if is_lambda:
+ self.write("lambda ", ", ".join(params))
+        # If the last statement is None (which is the
+        # same thing as "return None" in a lambda) and the
+        # next-to-last statement is a "yield", then we want to
+        # drop the (return) None, since that was put there only
+        # to have something to return after the yield finishes.
+        # FIXME: this is a bit hokey and not general.
+ if (
+ len(ast) > 1
+ and self.traverse(ast[-1]) == "None"
+ and self.traverse(ast[-2]).strip().startswith("yield")
+ ):
+ del ast[-1]
+ # Now pick out the expr part of the last statement
+ ast_expr = ast[-1]
+ while ast_expr.kind != "expr":
+ ast_expr = ast_expr[0]
+ ast[-1] = ast_expr
+ pass
+ else:
+ self.write("(", ", ".join(params))
+ # self.println(indent, '#flags:\t', int(code.co_flags))
+
+ ends_in_comma = False
+ if kwonlyargcount > 0:
+        # Without *args (CO_VARARGS, flag 0x04) a bare "*" is needed
+        # to introduce the keyword-only parameters.
+        if not (4 & code.co_flags):
+ if argc > 0:
+ self.write(", *, ")
+ else:
+ self.write("*, ")
+ pass
+ ends_in_comma = True
+ else:
+ if argc > 0:
+ self.write(", ")
+ ends_in_comma = True
+
+ ann_dict = kw_dict = default_tup = None
+    fn_bits = node[-1].attr
+    # fn_bits are the MAKE_FUNCTION flag bits: defaults, keyword-only
+    # defaults, annotations, closure. Work backwards from the end of the
+    # node, skipping over:
+    #   MAKE_FUNCTION,
+    #   the optional docstring,
+    #   LOAD_CONST qualified name,
+    #   LOAD_CONST code object.
+ index = -5 if node[-2] == "docstring" else -4
+ if fn_bits[-1]:
+ index -= 1
+ if fn_bits[-2]:
+ ann_dict = node[index]
+ index -= 1
+ if fn_bits[-3]:
+ kw_dict = node[index]
+ index -= 1
+ if fn_bits[-4]:
+ default_tup = node[index]
+
+ if kw_dict == "expr":
+ kw_dict = kw_dict[0]
+
+ kw_args = [None] * kwonlyargcount
+
+ # FIXME: handle free_tup, ann_dict, and default_tup
+ if kw_dict:
+ assert kw_dict == "dict"
+ defaults = [self.traverse(n, indent="") for n in kw_dict[:-2]]
+ names = eval(self.traverse(kw_dict[-2]))
+ assert len(defaults) == len(names)
+ sep = ""
+ # FIXME: possibly handle line breaks
+ for i, n in enumerate(names):
+ idx = kwargs.index(n)
+ if annotate_dict and n in annotate_dict:
+ t = "%s: %s=%s" % (n, annotate_dict[n], defaults[i])
+ else:
+ t = "%s=%s" % (n, defaults[i])
+ kw_args[idx] = t
+ pass
+ pass
+ # handle others
+        other_kw = [c is None for c in kw_args]
+
+ for i, flag in enumerate(other_kw):
+ if flag:
+ n = kwargs[i]
+ if n in annotate_dict:
+ kw_args[i] = "%s: %s" % (n, annotate_dict[n])
+ else:
+ kw_args[i] = "%s" % n
+
+ self.write(", ".join(kw_args))
+ ends_in_comma = False
+ pass
+ else:
+ if argc == 0:
+ ends_in_comma = True
+
+ if code_has_star_star_arg(code):
+ if not ends_in_comma:
+ self.write(", ")
+ star_star_arg = code.co_varnames[argc + kwonlyargcount]
+ if annotate_dict and star_star_arg in annotate_dict:
+ self.write("**%s: %s" % (star_star_arg, annotate_dict[star_star_arg]))
+ else:
+ self.write("**%s" % star_star_arg)
+
+ if is_lambda:
+ self.write(": ")
+ else:
+ self.write(")")
+ if annotate_dict and "return" in annotate_dict:
+ self.write(" -> %s" % annotate_dict["return"])
+ self.println(":")
+
+ if node[-2] == "docstring" and not is_lambda:
+ # docstring exists, dump it
+ self.println(self.traverse(node[-2]))
+
+ scanner_code._tokens = None # save memory
+ assert ast == "stmts"
+
+ all_globals = find_all_globals(ast, set())
+ globals, nonlocals = find_globals_and_nonlocals(
+ ast, set(), set(), code, self.version
+ )
+
+ for g in sorted((all_globals & self.mod_globs) | globals):
+ self.println(self.indent, "global ", g)
+
+ for nl in sorted(nonlocals):
+ self.println(self.indent, "nonlocal ", nl)
+
+ self.mod_globs -= all_globals
+ has_none = "None" in code.co_names
+ rn = has_none and not find_none(ast)
+ self.gen_source(
+ ast, code.co_name, scanner_code._customize, is_lambda=is_lambda, returnNone=rn
+ )
+
+ scanner_code._tokens = None
+ scanner_code._customize = None # save memory
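
The index bookkeeping above follows CPython 3.6's MAKE_FUNCTION calling convention, where the oparg is a bit mask saying which optional items were pushed below the qualified name and code object. A small standalone sketch of that layout; describe_make_function is an invented helper name, while the flag values themselves come from CPython:

    import dis

    # CPython 3.6 MAKE_FUNCTION oparg bits; each set bit means one more
    # optional item was pushed below the qualified name and code object.
    MAKE_FUNCTION_FLAGS = (
        (0x01, "positional defaults (tuple)"),
        (0x02, "keyword-only defaults (dict)"),
        (0x04, "annotations (dict)"),
        (0x08, "closure cells (tuple)"),
    )

    def describe_make_function(oparg):
        """List the optional stack items implied by a MAKE_FUNCTION oparg."""
        return [label for bit, label in MAKE_FUNCTION_FLAGS if oparg & bit]

    src = "def f(a, b=1, *, c: int = 2) -> int:\n    return a + b + c\n"
    for instr in dis.get_instructions(compile(src, "<example>", "exec")):
        if instr.opname == "MAKE_FUNCTION":
            # Under CPython 3.6 this prints 7: defaults, keyword-only
            # defaults, and annotations are present; there is no closure.
            print(instr.arg, describe_make_function(instr.arg))

The parameter-name slicing in make_function36() also leans on a CPython layout rule: code.co_varnames lists the positional parameters first, then the keyword-only ones, then the *args and **kwargs names, and only after that any other locals.
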
diff --git a/uncompyle6/semantics/pysource.py b/uncompyle6/semantics/pysource.py
index 6acfffc0..5f60a780 100644
--- a/uncompyle6/semantics/pysource.py
+++ b/uncompyle6/semantics/pysource.py
@@ -139,11 +139,13 @@ from uncompyle6.scanner import Code, get_scanner
import uncompyle6.parser as python_parser
from uncompyle6.semantics.make_function2 import make_function2
from uncompyle6.semantics.make_function3 import make_function3
+from uncompyle6.semantics.make_function36 import make_function36
from uncompyle6.semantics.parser_error import ParserError
from uncompyle6.semantics.check_ast import checker
from uncompyle6.semantics.customize import customize_for_version
from uncompyle6.semantics.helper import (
print_docstring,
+ find_code_node,
find_globals_and_nonlocals,
flatten_list,
)
@@ -841,23 +843,9 @@ class SourceWalker(GenericASTTraversal, object):
def n_mkfunc(self, node):
- if self.version >= 3.3 or node[-2] in ("kwargs", "no_kwargs"):
- # LOAD_CODET code object ..
- # LOAD_CONST "x0" if >= 3.3
- # MAKE_FUNCTION ..
- code_node = node[-3]
- elif node[-2] == "expr":
- code_node = node[-2][0]
- else:
- # LOAD_CODE code object ..
- # MAKE_FUNCTION ..
- code_node = node[-2]
-
- assert iscode(code_node.attr)
-
- func_name = code_node.attr.co_name
- self.write(func_name)
-
+ code_node = find_code_node(node, -2)
+ code = code_node.attr
+ self.write(code.co_name)
self.indent_more()
self.make_function(node, is_lambda=False, code_node=code_node)
@@ -869,11 +857,15 @@ class SourceWalker(GenericASTTraversal, object):
self.indent_less()
self.prune() # stop recursing
+    # Python has changed how functions are created so much that we need
+    # at least 3 different routines, and probably more in the future.
def make_function(self, node, is_lambda, nested=1, code_node=None, annotate=None):
- if self.version >= 3.0:
- make_function3(self, node, is_lambda, nested, code_node)
- else:
+ if self.version <= 2.7:
make_function2(self, node, is_lambda, nested, code_node)
+ elif 3.0 <= self.version <= 3.5:
+ make_function3(self, node, is_lambda, nested, code_node)
+ elif self.version >= 3.6:
+ make_function36(self, node, is_lambda, nested, code_node)
def n_docstring(self, node):
@@ -2211,7 +2203,11 @@ class SourceWalker(GenericASTTraversal, object):
def build_class(self, code):
"""Dump class definition, doc string and class body."""
- assert iscode(code)
+        assert iscode(code), (
+            "build_class() expects a Python code object; got %r" % (code,)
+        )
self.classes.append(self.currentclass)
code = Code(code, self.scanner, self.currentclass)
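
make_function() now fans out by self.version, which uncompyle6 keeps as a float. The same three-way split, written as a standalone table purely for illustration; the bounds mirror the if/elif chain above, and the 9.9 upper limit is an arbitrary placeholder:

    MAKE_FUNCTION_RANGES = (
        ((0.0, 2.7), "make_function2"),
        ((3.0, 3.5), "make_function3"),
        ((3.6, 9.9), "make_function36"),
    )

    def pick_make_function(version):
        for (low, high), name in MAKE_FUNCTION_RANGES:
            if low <= version <= high:
                return name
        raise ValueError("unhandled Python version %s" % version)

    assert pick_make_function(2.7) == "make_function2"
    assert pick_make_function(3.5) == "make_function3"
    assert pick_make_function(3.8) == "make_function36"
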
diff --git a/uncompyle6/semantics/transform.py b/uncompyle6/semantics/transform.py
index 3a0fafe8..c415c326 100644
--- a/uncompyle6/semantics/transform.py
+++ b/uncompyle6/semantics/transform.py
@@ -13,6 +13,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+from xdis.code import iscode
from uncompyle6.show import maybe_show_tree
from copy import copy
from spark_parser import GenericASTTraversal, GenericASTTraversalPruningException
@@ -30,8 +31,7 @@ def is_docstring(node):
class TreeTransform(GenericASTTraversal, object):
- def __init__(self, version, show_ast=None,
- is_pypy=False):
+ def __init__(self, version, show_ast=None, is_pypy=False):
self.version = version
self.showast = show_ast
self.is_pypy = is_pypy
@@ -67,6 +67,35 @@ class TreeTransform(GenericASTTraversal, object):
node[i] = self.preorder(kid)
return node
+ def n_mkfunc(self, node):
+ """If the function has a docstring (this is found in the code
+ constants), pull that out and make it part of the syntax
+ tree. When generating the source string that AST node rather
+ than the code field is seen and used.
+ """
+
+ for i in range(2, len(node) + 1):
+ if node[-i].kind == "LOAD_CODE":
+ break
+
+ code_node = node[-i]
+ code = code_node.attr
+ assert iscode(code)
+
+ if (
+ node[-1].pattr != "closure"
+ and len(code.co_consts) > 0
+ and code.co_consts[0] is not None
+ ):
+ docstring_node = SyntaxTree(
+ "docstring", [Token("LOAD_STR", has_arg=True, pattr=code.co_consts[0])]
+ )
+ docstring_node.transformed_by = "n_mkfunc"
+ node = SyntaxTree("mkfunc", node[:-1] + [docstring_node, node[-1]])
+ node.transformed_by = "n_mkfunc"
+
+ return node
+
def n_ifstmt(self, node):
"""Here we check if we can turn an `ifstmt` or 'iflaststmtl` into
some kind of `assert` statement"""
@@ -128,7 +157,13 @@ class TreeTransform(GenericASTTraversal, object):
expr = call[1][0]
node = SyntaxTree(
kind,
- [assert_expr, jump_cond, LOAD_ASSERT, expr, RAISE_VARARGS_1]
+ [
+ assert_expr,
+ jump_cond,
+ LOAD_ASSERT,
+ expr,
+ RAISE_VARARGS_1,
+ ],
)
pass
pass
@@ -157,10 +192,9 @@ class TreeTransform(GenericASTTraversal, object):
LOAD_ASSERT = expr[0]
node = SyntaxTree(
- kind,
- [assert_expr, jump_cond, LOAD_ASSERT, RAISE_VARARGS_1]
+ kind, [assert_expr, jump_cond, LOAD_ASSERT, RAISE_VARARGS_1]
)
- node.transformed_by="n_ifstmt",
+            node.transformed_by = "n_ifstmt"
pass
pass
return node
@@ -171,7 +205,9 @@ class TreeTransform(GenericASTTraversal, object):
# if elif elif
def n_ifelsestmt(self, node, preprocess=False):
"""
- Here we turn:
+        Transformation involving if..else statements.
+        For example:
+
if ...
else
@@ -184,7 +220,7 @@ class TreeTransform(GenericASTTraversal, object):
[else ...]
- where appropriate
+ where appropriate.
"""
else_suite = node[3]
@@ -274,9 +310,30 @@ class TreeTransform(GenericASTTraversal, object):
self.ast = copy(ast)
self.ast = self.traverse(self.ast, is_lambda=False)
- if self.ast[-1] == RETURN_NONE:
- self.ast.pop() # remove last node
- # todo: if empty, add 'pass'
+ try:
+ for i in range(len(self.ast)):
+ if is_docstring(self.ast[i]):
+ docstring_ast = SyntaxTree(
+ "docstring",
+ [
+ Token(
+ "LOAD_STR",
+ has_arg=True,
+ offset=0,
+ pattr=self.ast[i][0][0][0][0].attr,
+ )
+ ],
+ transformed_by="transform",
+ )
+ del self.ast[i]
+ self.ast.insert(0, docstring_ast)
+ break
+
+ if self.ast[-1] == RETURN_NONE:
+ self.ast.pop() # remove last node
+ # todo: if empty, add 'pass'
+        except Exception:
+            # Be forgiving: if the tree isn't shaped as expected, skip the
+            # docstring promotion and RETURN_NONE trimming rather than fail.
+            pass
return self.ast
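
Both n_mkfunc here and the module-level docstring hoisting rely on a CPython convention: when a function or module has a docstring, the compiler stores it as the first entry of the code object's co_consts, and stores None there otherwise, at least on the 3.x interpreters this patch targets. A quick standalone check of that assumption; first_const_of_f is a made-up name for the example:

    def first_const_of_f(src):
        module_code = compile(src, "<example>", "exec")
        # The nested code object for f() is one of the module's constants.
        f_code = next(c for c in module_code.co_consts if hasattr(c, "co_consts"))
        return f_code.co_consts[0]

    print(first_const_of_f('def f():\n    "greet the world"\n    return 1\n'))
    # -> 'greet the world'
    print(first_const_of_f("def f():\n    return 1\n"))
    # -> typically None when there is no docstring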