mirror of
https://github.com/rocky/python-uncompyle6.git
synced 2025-08-03 16:59:52 +08:00
Compare commits
23 Commits
Author | SHA1 | Date
---|---|---
 | 4f545c5bfa |
 | bbfdb814bf |
 | d088e7ef11 |
 | 6646d18c7a |
 | 716e097654 |
 | dba95c5200 |
 | d5df411c7a |
 | 077f192711 |
 | 498df35a6c |
 | 7e71ce3260 |
 | 825add1af7 |
 | 1a901bde8f |
 | 732b5165c2 |
 | 7bd81efe9b |
 | c42e16fafe |
 | 6de57249ed |
 | faf6ea9630 |
 | 566143b515 |
 | b2e1edb434 |
 | 62c249d6b2 |
 | db2fdb30fd |
 | 37301ab49e |
 | 97e3a7eb02 |
NEWS.md (36 changed lines)
@@ -1,3 +1,39 @@

3.6.2: 2020-1-5 Samish
======================

Yet again the focus has been on just fixing bugs, mostly geared toward the
later 3.x range. To get some sense of what still needs fixing, consult
test/stdlib/runtests.sh. And that only has a portion of what's known.

`make_function.py` has gotten so complex that it was split out into three parts
to handle different version ranges: Python <3, Python 3.0..3.6, and Python 3.7+.

An important fix is that we had been dropping docstrings in Python 3 code as a result
of an incomplete merge from the decompile3 base with respect to the transform phase.

Also important (at least to me) is that we can now handle 3.6+
variable type annotations. Some of the decompile3 code uses them in
its source code, and I now use variable annotations in conjunction
with mypy in some of my other Python projects.
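As a quick illustration, the variable annotations referred to here are statements like the following (a minimal sketch that mirrors the new test file test/simple_source/bug36/02_var_annotate.py added later in this change):

```python
from typing import List

x: bool                # bare annotation, no value assigned
z: int = 5             # annotated assignment
v: List[int] = [1, 2]  # annotated with a typing generic

x = (z == 5)
assert x and v[1] == 2
```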
Code generation for imports, especially where the import is dotted,
changed a bit in 3.7; with this release we are just now tracking that
change better. For this I've added the pseudo instruction
`IMPORT_NAME_ATTR`, derived from the `IMPORT_NAME` instruction, to
indicate when an import contains a dotted import. Similarly, code for
3.7 `import .. as ` is basically the same as `from .. import`; the
only difference is that the target of the name changes to an "alias" in the
former. As a result, the disambiguation is now done on the semantic
action side, rather than in parsing grammar rules.
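The two import forms involved are exercised by the new test file test/simple_source/bug37/04_importlist.py (shown further below); in short:

```python
import os.path as osp      # 3.7 "import .. as": compiles much like a "from" import
from os import sep, name   # plain "from .. import"
import collections.abc     # dotted import, no alias

assert osp.basename("a") == "a"
assert sep and name
assert collections.abc
```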
Some small specific fixes:

* 3.7+: some chained-compare parsing has been fixed. Others remain.
* Better if/else rule checking in the 3.4-and-below range.
* 3.4+ keyword-only parameter handling was fixed more generally.
* 3.3 .. 3.5 keyword-only parameter args in lambdas were fixed.


3.6.1: 2019-12-10 Christmas Hannukah
====================================

@@ -35,7 +35,7 @@ def test_grammar():

     expect_right_recursive = set([("designList", ("store", "DUP_TOP", "designList"))])

-    if PYTHON_VERSION <= 3.7:
+    if PYTHON_VERSION <= 3.6:
         unused_rhs.add("call")

     if PYTHON_VERSION > 2.6:
test/.gitignore (vendored, 1 changed line)
@@ -1 +1,2 @@
 /.coverage
+/nohup.out
Binary files changed (contents not shown). New bytecode test files added:

    test/bytecode_3.3_run/00_docstring.pyc
    test/bytecode_3.3_run/02_pos_args.pyc
    test/bytecode_3.5_run/02_pos_args.pyc
    test/bytecode_3.5_run/04_importlist.pyc
    test/bytecode_3.6_run/02_var_annotate.pyc
    test/bytecode_3.6_run/04_importlist.pyc
    test/bytecode_3.7_run/02_var_annotate.pyc
    test/bytecode_3.7_run/04_importlist.pyc
    test/bytecode_3.7_run/06_listcomp.pyc
    test/bytecode_3.7_run/10_complex.pyc
    test/bytecode_3.8_run/02_var_annotate.pyc
    test/bytecode_3.8_run/04_importlist.pyc
@@ -22,3 +22,11 @@ def columnize(l):
     return [i for i in range(len(l))
             if not isinstance(l[i], str)]
 assert [0, 2] == columnize([1, 'a', 2])
+
+# From 3.7 test_generators
+# Bug was in handling the way list_if is optimized in 3.7+;
+# We need list_if37 and compare_chained37.
+def init_board(c):
+    return [io for io in c if 3 <= io < 5]
+
+assert init_board(list(range(6))) == [3, 4]
@@ -1,4 +1,5 @@
 # Greatly simplified from from 3.3 test_complex.py
+from math import atan2

 # RUNNABLE!
 def assertCloseAbs(x, y, eps=1e-09):
@@ -38,14 +39,16 @@ def test_truediv():
         check_div(x, y)

 def test_plus_minus_0j():
-    z1, z2 = (0j, (-0 - 0j))
+    assert -0j == -0j == complex(0.0, 0.0)
+    assert -0-0j == -0j == complex(0.0, 0.0)
+    z1, z2 = (0j, -0j)
     assert atan2(z1.imag, -1.0) == atan2(0.0, -1.0)
-    assert atan2(z2.imag, -1.0), atan2(-0.0, -1.0)
+    # assert atan2(z2.imag, -1.0), atan2(-0.0, -1.0)

     # Check that we can handle -inf, and inf as a complex numbers.
     # And put it in a tuple and a list to make it harder.
     z1, z2 = (-1e1000j, 1e1000j)
     assert z1 in [-1e1000j, 1e1000j]
-    assert z1 == z2
+    assert z1 != z2

 test_truediv()
-test_plus_minus0j()
+test_plus_minus_0j()
@@ -1,7 +1,16 @@
# From 3.7 test_cmath.py
# Had bug in 3.x in not having semantic importlist rule
def main(osp, Mfile, mainpyfile, dbg=None):
    try:
        from xdis import load_module, PYTHON_VERSION, IS_PYPY
        return PYTHON_VERSION, IS_PYPY, load_module
    except:
        pass
# bug is treating "import as" as "from xx import" while
# still being able to hand "from xx import" properly

# RUNNABLE!
import os.path as osp
from sys import path
from os import sep, name
import collections.abc

assert osp.basename("a") == "a"
assert path
assert sep
assert name
assert collections.abc
test/simple_source/bug36/02_var_annotate.py (new file, 12 lines)
@@ -0,0 +1,12 @@
# 3.6+ type annotations on variables
from typing import List

# RUNNABLE!
y = 2
x: bool
z: int = 5
x = (z == 5)
assert x
assert y == 2
v: List[int] = [1, 2]
assert v[1] == y
@@ -4,6 +4,7 @@ from os import path
from os import *
import time as time1, os as os1
import http.client as httpclient
from sys import stdin, stdout, stderr
if len(__file__) == 0:
    # a.b.c should force consecutive LOAD_ATTRs
    import a.b.c as d
@@ -108,7 +108,8 @@ case $PYVERSION in
|
||||
|
||||
[test_capi.py]=1
|
||||
[test_curses.py]=1 # Possibly fails on its own but not detected
|
||||
[test test_cmd_line.py]=1 # Takes too long, maybe hangs, or looking for interactive input?
|
||||
[test_cmd_line.py]=1 # Takes too long, maybe hangs, or looking for interactive input?
|
||||
[test_compilex.py]=1 # Probably complex literals again. Investigate
|
||||
[test_dis.py]=1 # We change line numbers - duh!
|
||||
[test_doctest.py]=1 # Fails on its own
|
||||
[test_exceptions.py]=1
|
||||
@@ -148,9 +149,46 @@ case $PYVERSION in
|
||||
SKIP_TESTS[test_base64.py]=1
|
||||
fi
|
||||
;;
|
||||
3.0)
|
||||
SKIP_TESTS=(
|
||||
[test_array.py]=1 # Handling of bytestring
|
||||
[test_concurrent_futures.py]=1 # too long to run over 46 seconds by itself
|
||||
[test_datetimetester.py]=1
|
||||
[test_decimal.py]=1
|
||||
[test_dis.py]=1 # We change line numbers - duh!
|
||||
[test_fileio.py]=1
|
||||
)
|
||||
if (( batch )) ; then
|
||||
# Fails in crontab environment?
|
||||
# Figure out what's up here
|
||||
SKIP_TESTS[test_exception_variations.py]=1
|
||||
SKIP_TESTS[test_quopri.py]=1
|
||||
fi
|
||||
;;
|
||||
3.1)
|
||||
SKIP_TESTS=(
|
||||
[test_collections.py]=1
|
||||
[test_concurrent_futures.py]=1 # too long to run over 46 seconds by itself
|
||||
[test_datetimetester.py]=1
|
||||
[test_decimal.py]=1
|
||||
[test_dis.py]=1 # We change line numbers - duh!
|
||||
[test_fileio.py]=1
|
||||
)
|
||||
if (( batch )) ; then
|
||||
# Fails in crontab environment?
|
||||
# Figure out what's up here
|
||||
SKIP_TESTS[test_exception_variations.py]=1
|
||||
SKIP_TESTS[test_quopri.py]=1
|
||||
fi
|
||||
;;
|
||||
3.2)
|
||||
SKIP_TESTS=(
|
||||
[test_ast.py]=1 # Look at: AssertionError: b'hi' != 'hi'
|
||||
[test_cmd_line.py]=1
|
||||
[test_collections.py]=1
|
||||
[test_concurrent_futures.py]=1 # too long to run over 46 seconds by itself
|
||||
[test_datetimetester.py]=1
|
||||
[test_decimal.py]=1
|
||||
[test_dis.py]=1 # We change line numbers - duh!
|
||||
[test_quopri.py]=1 # TypeError: Can't convert 'bytes' object to str implicitly
|
||||
)
|
||||
@@ -176,7 +214,11 @@ case $PYVERSION in
|
||||
|
||||
3.4)
|
||||
SKIP_TESTS=(
|
||||
[test_asynchat.py]=1 #
|
||||
[test_asyncore.py]=1 #
|
||||
[test_atexit.py]=1 #
|
||||
[test_bdb.py]=1 #
|
||||
[test_binascii]=1
|
||||
[test_dis.py]=1 # We change line numbers - duh!
|
||||
)
|
||||
if (( batch )) ; then
|
||||
@@ -188,7 +230,7 @@ case $PYVERSION in
|
||||
;;
|
||||
3.5)
|
||||
SKIP_TESTS=(
|
||||
[test_ast.py]=1 #
|
||||
[test_ast.py]=1 # line 379, in test_literal_eval self.assertEqual(ast.literal_eval('b"hi"'), 'hi')
|
||||
[test_atexit.py]=1 #
|
||||
[test_builtin.py]=1 #
|
||||
[test_compare.py]=1
|
||||
@@ -222,29 +264,41 @@ case $PYVERSION in
|
||||
SKIP_TESTS=(
|
||||
[test_ast.py]=1 #
|
||||
[test_atexit.py]=1 #
|
||||
[test_baseexception.py]=1 #
|
||||
[test_bdb.py]=1 #
|
||||
[test_buffer.py]=1 #
|
||||
[test_builtin.py]=1 #
|
||||
[test_buffer.py]=1 # parse error
|
||||
[test_builtin.py]=1 # parser error
|
||||
[test_cmdline.py]=1 # Interactive?
|
||||
[test_codecs-3.7.py]=1
|
||||
[test_collections.py]=1 # Fixed I think in decompyle3 - pull from there
|
||||
[test_compare.py]=1
|
||||
[test_compile.py]=1
|
||||
[test_configparser.py]=1
|
||||
[test_contains.py]=1 # Code "while False: yield None" is optimized away in compilation
|
||||
[test_contextlib_async.py]=1 # Investigate
|
||||
[test_context.py]=1
|
||||
[test_coroutines.py]=1 # Parse error
|
||||
[test_crypt.py]=1 # Parse error
|
||||
[test_curses.py]=1 # Parse error
|
||||
[test_decorators.py]=1 # Control flow wrt "if elif"
|
||||
[test_dataclasses.py]=1 # parse error
|
||||
[test_datetime.py]=1 # Takes too long
|
||||
[test_dbm_gnu.py]=1 # Takes too long
|
||||
[test_decimal.py]=1 # Parse error
|
||||
[test_descr.py]=1 # Parse error
|
||||
[test_dictcomps.py]=1 # Bad semantics - Investigate
|
||||
[test_dis.py]=1 # We change line numbers - duh!
|
||||
[test_enumerate.py]=1 #
|
||||
[test_enum.py]=1 #
|
||||
[test_faulthandler.py]=1 # takes too long
|
||||
[test_generators.py]=1 # improper decompile of assert i < n and (n-i) % 3 == 0
|
||||
# ...
|
||||
)
|
||||
;;
|
||||
3.8)
|
||||
SKIP_TESTS=(
|
||||
[test_contains.py]=1 # Code "while False: yield None" is optimized away in compilation
|
||||
[test_collections.py]=1 # Fixed I think in decompyle3 - pull from there
|
||||
[test_collections.py]=1 # Investigate
|
||||
[test_decorators.py]=1 # Control flow wrt "if elif"
|
||||
[test_exceptions.py]=1 # parse error
|
||||
[test_dis.py]=1 # We change line numbers - duh!
|
||||
[test_pow.py]=1 # Control flow wrt "continue"
|
||||
[test_quopri.py]=1 # Only fails on POWER
|
||||
|
@@ -135,8 +135,8 @@ def do_tests(
             pass

     if len(files) > max_files:
-        files = [file for file in files if not "site-packages" in file]
-        files = [file for file in files if not "test" in file]
+        files = [file for file in files if not "site-packages" in file and (file.endswith(".pyo") or file.endswith(".pyc"))]
+        files = [file for file in files if not "test" in file and (file.endswith(".pyo") or file.endswith(".pyc"))]
         if len(files) > max_files:
             # print("Number of files %d - truncating to last 200" % len(files))
             print(
@@ -62,6 +62,8 @@ class PythonParser(GenericASTBuilder):
             'kvlist_n',
             # Python 3.6+
             'come_from_loops',
+            # Python 3.7+
+            'importlist37',
         ]
         self.collect = frozenset(nt_list)
@@ -1586,6 +1586,12 @@ class Python3Parser(PythonParser):
             if not isinstance(come_froms, Token):
                 return tokens[first].offset > come_froms[-1].attr
             return False
+        elif lhs == "ifelsestmt" and rule[1][2] == "jump_forward_else":
+            last = min(last, len(tokens) - 1)
+            if tokens[last].off2int() == -1:
+                last -= 1
+            jump_forward_else = ast[2]
+            return tokens[first].off2int() <= jump_forward_else[0].attr < tokens[last].off2int()

         return False
@@ -1,4 +1,4 @@
|
||||
# Copyright (c) 2017-2019 Rocky Bernstein
|
||||
# Copyright (c) 2017-2020 Rocky Bernstein
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -22,7 +22,6 @@ from spark_parser import DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
|
||||
from uncompyle6.parsers.parse37base import Python37BaseParser
|
||||
|
||||
class Python37Parser(Python37BaseParser):
|
||||
|
||||
def __init__(self, debug_parser=PARSER_DEFAULT_DEBUG):
|
||||
super(Python37Parser, self).__init__(debug_parser)
|
||||
self.customized = {}
|
||||
@@ -328,6 +327,33 @@ class Python37Parser(Python37BaseParser):
|
||||
attributes ::= LOAD_ATTR+
|
||||
"""
|
||||
|
||||
def p_import37(self, args):
|
||||
"""
|
||||
stmt ::= import_as37
|
||||
import_as37 ::= LOAD_CONST LOAD_CONST importlist37 store POP_TOP
|
||||
|
||||
importlist37 ::= importlist37 ROT_TWO IMPORT_FROM
|
||||
importlist37 ::= importlist37 ROT_TWO POP_TOP IMPORT_FROM
|
||||
importlist37 ::= importattr37
|
||||
importattr37 ::= IMPORT_NAME_ATTR IMPORT_FROM
|
||||
|
||||
# The 3.7base scanner adds IMPORT_NAME_ATTR
|
||||
alias ::= IMPORT_NAME_ATTR attributes store
|
||||
alias ::= IMPORT_NAME_ATTR store
|
||||
import_from ::= LOAD_CONST LOAD_CONST importlist POP_TOP
|
||||
|
||||
expr ::= attribute37
|
||||
attribute37 ::= expr LOAD_METHOD
|
||||
|
||||
stmt ::= import_from37
|
||||
importlist37 ::= importlist37 alias37
|
||||
importlist37 ::= alias37
|
||||
alias37 ::= IMPORT_NAME store
|
||||
alias37 ::= IMPORT_FROM store
|
||||
import_from37 ::= LOAD_CONST LOAD_CONST IMPORT_NAME_ATTR importlist37 POP_TOP
|
||||
|
||||
"""
|
||||
|
||||
def p_list_comprehension(self, args):
|
||||
"""
|
||||
expr ::= list_comp
|
||||
@@ -501,104 +527,12 @@ class Python37Parser(Python37BaseParser):
|
||||
iflaststmt ::= testexpr c_stmts_opt JUMP_FORWARD
|
||||
"""
|
||||
|
||||
def p_36misc(self, args):
|
||||
def p_37async(self, args):
|
||||
"""
|
||||
sstmt ::= sstmt RETURN_LAST
|
||||
|
||||
# 3.6 redoes how return_closure works. FIXME: Isolate to LOAD_CLOSURE
|
||||
return_closure ::= LOAD_CLOSURE DUP_TOP STORE_NAME RETURN_VALUE RETURN_LAST
|
||||
|
||||
for_block ::= l_stmts_opt come_from_loops JUMP_BACK
|
||||
come_from_loops ::= COME_FROM_LOOP*
|
||||
|
||||
whilestmt ::= setup_loop testexpr l_stmts_opt
|
||||
JUMP_BACK come_froms POP_BLOCK COME_FROM_LOOP
|
||||
whilestmt ::= setup_loop testexpr l_stmts_opt
|
||||
come_froms JUMP_BACK come_froms POP_BLOCK COME_FROM_LOOP
|
||||
|
||||
# 3.6 due to jump optimization, we sometimes add RETURN_END_IF where
|
||||
# RETURN_VALUE is meant. Specifcally this can happen in
|
||||
# ifelsestmt -> ...else_suite _. suite_stmts... (last) stmt
|
||||
return ::= ret_expr RETURN_END_IF
|
||||
return ::= ret_expr RETURN_VALUE COME_FROM
|
||||
return_stmt_lambda ::= ret_expr RETURN_VALUE_LAMBDA COME_FROM
|
||||
|
||||
# A COME_FROM is dropped off because of JUMP-to-JUMP optimization
|
||||
and ::= expr jmp_false expr
|
||||
and ::= expr jmp_false expr jmp_false
|
||||
|
||||
jf_cf ::= JUMP_FORWARD COME_FROM
|
||||
cf_jf_else ::= come_froms JUMP_FORWARD ELSE
|
||||
|
||||
conditional ::= expr jmp_false expr jf_cf expr COME_FROM
|
||||
|
||||
async_for_stmt ::= setup_loop expr
|
||||
GET_AITER
|
||||
LOAD_CONST YIELD_FROM SETUP_EXCEPT GET_ANEXT LOAD_CONST
|
||||
YIELD_FROM
|
||||
store
|
||||
POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT DUP_TOP
|
||||
LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_FALSE
|
||||
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_BLOCK
|
||||
JUMP_ABSOLUTE END_FINALLY COME_FROM
|
||||
for_block POP_BLOCK
|
||||
COME_FROM_LOOP
|
||||
|
||||
# Adds a COME_FROM_ASYNC_WITH over 3.5
|
||||
# FIXME: remove corresponding rule for 3.5?
|
||||
|
||||
except_suite ::= c_stmts_opt COME_FROM POP_EXCEPT jump_except COME_FROM
|
||||
|
||||
jb_cfs ::= come_from_opt JUMP_BACK come_froms
|
||||
ifelsestmtl ::= testexpr c_stmts_opt jb_cfs else_suitel
|
||||
ifelsestmtl ::= testexpr c_stmts_opt cf_jf_else else_suitel
|
||||
|
||||
# In 3.6+, A sequence of statements ending in a RETURN can cause
|
||||
# JUMP_FORWARD END_FINALLY to be omitted from try middle
|
||||
|
||||
except_return ::= POP_TOP POP_TOP POP_TOP returns
|
||||
except_handler ::= JUMP_FORWARD COME_FROM_EXCEPT except_return
|
||||
|
||||
# Try middle following a returns
|
||||
except_handler36 ::= COME_FROM_EXCEPT except_stmts END_FINALLY
|
||||
|
||||
stmt ::= try_except36
|
||||
try_except36 ::= SETUP_EXCEPT returns except_handler36
|
||||
opt_come_from_except
|
||||
try_except36 ::= SETUP_EXCEPT suite_stmts
|
||||
try_except36 ::= SETUP_EXCEPT suite_stmts_opt POP_BLOCK
|
||||
except_handler36 come_from_opt
|
||||
|
||||
# 3.6 omits END_FINALLY sometimes
|
||||
except_handler36 ::= COME_FROM_EXCEPT except_stmts
|
||||
except_handler36 ::= JUMP_FORWARD COME_FROM_EXCEPT except_stmts
|
||||
except_handler ::= jmp_abs COME_FROM_EXCEPT except_stmts
|
||||
|
||||
stmt ::= tryfinally36
|
||||
tryfinally36 ::= SETUP_FINALLY returns
|
||||
COME_FROM_FINALLY suite_stmts
|
||||
tryfinally36 ::= SETUP_FINALLY returns
|
||||
COME_FROM_FINALLY suite_stmts_opt END_FINALLY
|
||||
except_suite_finalize ::= SETUP_FINALLY returns
|
||||
COME_FROM_FINALLY suite_stmts_opt END_FINALLY _jump
|
||||
|
||||
stmt ::= tryfinally_return_stmt
|
||||
tryfinally_return_stmt ::= SETUP_FINALLY suite_stmts_opt POP_BLOCK LOAD_CONST
|
||||
COME_FROM_FINALLY
|
||||
|
||||
compare_chained2 ::= expr COMPARE_OP come_froms JUMP_FORWARD
|
||||
"""
|
||||
|
||||
def p_37misc(self, args):
|
||||
"""
|
||||
stmt ::= import37
|
||||
stmt ::= async_for_stmt37
|
||||
stmt ::= async_for_stmt
|
||||
stmt ::= async_forelse_stmt
|
||||
|
||||
# Where does the POP_TOP really belong?
|
||||
import37 ::= import POP_TOP
|
||||
|
||||
async_for_stmt ::= setup_loop expr
|
||||
GET_AITER
|
||||
SETUP_EXCEPT GET_ANEXT LOAD_CONST
|
||||
@@ -637,42 +571,36 @@ class Python37Parser(Python37BaseParser):
|
||||
COME_FROM
|
||||
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_TOP POP_BLOCK
|
||||
else_suite COME_FROM_LOOP
|
||||
"""
|
||||
|
||||
attributes ::= IMPORT_FROM ROT_TWO POP_TOP IMPORT_FROM
|
||||
attributes ::= attributes ROT_TWO POP_TOP IMPORT_FROM
|
||||
|
||||
attribute37 ::= expr LOAD_METHOD
|
||||
expr ::= attribute37
|
||||
|
||||
# long except clauses in a loop can sometimes cause a JUMP_BACK to turn into a
|
||||
# JUMP_FORWARD to a JUMP_BACK. And when this happens there is an additional
|
||||
# ELSE added to the except_suite. With better flow control perhaps we can
|
||||
# sort this out better.
|
||||
except_suite ::= c_stmts_opt POP_EXCEPT jump_except ELSE
|
||||
|
||||
def p_37chained(self, args):
|
||||
"""
|
||||
testtrue ::= compare_chained37
|
||||
testfalse ::= compare_chained37_false
|
||||
|
||||
compare_chained ::= compare_chained37
|
||||
compare_chained ::= compare_chained37_false
|
||||
|
||||
compare_chained37 ::= expr compare_chained1a_37
|
||||
compare_chained37 ::= expr compare_chained1b_37
|
||||
compare_chained37 ::= expr compare_chained1c_37
|
||||
|
||||
compare_chained37_false ::= expr compare_chained1_false_37
|
||||
compare_chained37_false ::= expr compare_chained1b_false_37
|
||||
compare_chained37_false ::= expr compare_chained2_false_37
|
||||
|
||||
compare_chained1a_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE
|
||||
compare_chained1a_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE
|
||||
compare_chained2a_37 COME_FROM POP_TOP COME_FROM
|
||||
compare_chained1b_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE
|
||||
compare_chained2b_37 POP_TOP JUMP_FORWARD COME_FROM
|
||||
compare_chained1b_false_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE
|
||||
compare_chained2b_false_37 POP_TOP _jump COME_FROM
|
||||
|
||||
compare_chained1c_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE
|
||||
compare_chained2a_37 POP_TOP
|
||||
|
||||
compare_chained1_false_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE
|
||||
compare_chained2c_37 POP_TOP JUMP_FORWARD COME_FROM
|
||||
compare_chained1_false_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE
|
||||
compare_chained2b_37 POP_TOP _jump COME_FROM
|
||||
compare_chained2b_false_37 POP_TOP _jump COME_FROM
|
||||
|
||||
compare_chained2_false_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP POP_JUMP_IF_FALSE
|
||||
compare_chained2a_false_37 POP_TOP JUMP_BACK COME_FROM
|
||||
@@ -681,19 +609,26 @@ class Python37Parser(Python37BaseParser):
|
||||
compare_chained2a_37 ::= expr COMPARE_OP come_from_opt POP_JUMP_IF_TRUE JUMP_BACK
|
||||
compare_chained2a_false_37 ::= expr COMPARE_OP come_from_opt POP_JUMP_IF_FALSE jf_cfs
|
||||
|
||||
compare_chained2b_37 ::= expr COMPARE_OP come_from_opt POP_JUMP_IF_FALSE JUMP_FORWARD COME_FROM
|
||||
compare_chained2b_37 ::= expr COMPARE_OP come_from_opt POP_JUMP_IF_FALSE JUMP_FORWARD
|
||||
compare_chained2b_false_37 ::= expr COMPARE_OP come_from_opt POP_JUMP_IF_FALSE JUMP_FORWARD COME_FROM
|
||||
compare_chained2b_false_37 ::= expr COMPARE_OP come_from_opt POP_JUMP_IF_FALSE JUMP_FORWARD
|
||||
|
||||
compare_chained2c_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP come_from_opt POP_JUMP_IF_FALSE
|
||||
compare_chained2a_false_37 ELSE
|
||||
compare_chained2c_37 ::= expr DUP_TOP ROT_THREE COMPARE_OP come_from_opt POP_JUMP_IF_FALSE
|
||||
compare_chained2a_false_37
|
||||
"""
|
||||
|
||||
def p_37conditionals(self, args):
|
||||
"""
|
||||
jf_cfs ::= JUMP_FORWARD _come_froms
|
||||
ifelsestmt ::= testexpr c_stmts_opt jf_cfs else_suite opt_come_from_except
|
||||
|
||||
jmp_false37 ::= POP_JUMP_IF_FALSE COME_FROM
|
||||
list_if ::= expr jmp_false37 list_iter
|
||||
list_iter ::= list_if37
|
||||
list_iter ::= list_if37_not
|
||||
list_if37 ::= compare_chained37_false list_iter
|
||||
list_if37_not ::= compare_chained37 list_iter
|
||||
|
||||
_ifstmts_jump ::= c_stmts_opt come_froms
|
||||
|
||||
@@ -760,6 +695,28 @@ class Python37Parser(Python37BaseParser):
|
||||
comp_iter ::= comp_body
|
||||
"""
|
||||
|
||||
def p_expr3(self, args):
|
||||
"""
|
||||
expr ::= conditionalnot
|
||||
conditionalnot ::= expr jmp_true expr jump_forward_else expr COME_FROM
|
||||
|
||||
# a JUMP_FORWARD to another JUMP_FORWARD can get turned into
|
||||
# a JUMP_ABSOLUTE with no COME_FROM
|
||||
conditional ::= expr jmp_false expr jump_absolute_else expr
|
||||
|
||||
# if_expr_true are for conditions which always evaluate true
|
||||
# There is dead or non-optional remnants of the condition code though,
|
||||
# and we use that to match on to reconstruct the source more accurately
|
||||
expr ::= if_expr_true
|
||||
if_expr_true ::= expr JUMP_FORWARD expr COME_FROM
|
||||
"""
|
||||
|
||||
def p_generator_exp3(self, args):
|
||||
"""
|
||||
load_genexpr ::= LOAD_GENEXPR
|
||||
load_genexpr ::= BUILD_TUPLE_1 LOAD_GENEXPR LOAD_STR
|
||||
"""
|
||||
|
||||
def p_grammar(self, args):
|
||||
"""
|
||||
sstmt ::= stmt
|
||||
@@ -818,6 +775,7 @@ class Python37Parser(Python37BaseParser):
|
||||
iflaststmtl ::= testexpr c_stmts JUMP_BACK POP_BLOCK
|
||||
|
||||
# These are used to keep parse tree indices the same
|
||||
jump_forward_else ::= JUMP_FORWARD
|
||||
jump_forward_else ::= JUMP_FORWARD ELSE
|
||||
jump_forward_else ::= JUMP_FORWARD COME_FROM
|
||||
jump_absolute_else ::= JUMP_ABSOLUTE ELSE
|
||||
@@ -942,6 +900,7 @@ class Python37Parser(Python37BaseParser):
|
||||
ret_cond ::= expr POP_JUMP_IF_FALSE expr RETURN_END_IF COME_FROM ret_expr_or_cond
|
||||
|
||||
jitop_come_from ::= JUMP_IF_TRUE_OR_POP COME_FROM
|
||||
jifop_come_from ::= JUMP_IF_FALSE_OR_POP COME_FROM
|
||||
or ::= and jitop_come_from expr COME_FROM
|
||||
or ::= expr JUMP_IF_TRUE_OR_POP expr COME_FROM
|
||||
or ::= expr JUMP_IF_TRUE expr COME_FROM
|
||||
@@ -955,11 +914,13 @@ class Python37Parser(Python37BaseParser):
|
||||
testfalse ::= or jmp_false COME_FROM
|
||||
or ::= expr jmp_true expr
|
||||
|
||||
|
||||
|
||||
and ::= expr JUMP_IF_FALSE_OR_POP expr COME_FROM
|
||||
and ::= expr JUMP_IF_FALSE_OR_POP expr come_from_opt
|
||||
and ::= expr jifop_come_from expr
|
||||
and ::= expr JUMP_IF_FALSE expr COME_FROM
|
||||
|
||||
pjit_come_from ::= POP_JUMP_IF_TRUE COME_FROM
|
||||
or ::= expr pjit_come_from expr
|
||||
|
||||
## FIXME: Is the below needed or is it covered above??
|
||||
and ::= expr jmp_false expr COME_FROM
|
||||
or ::= expr jmp_true expr COME_FROM
|
||||
@@ -975,6 +936,8 @@ class Python37Parser(Python37BaseParser):
|
||||
"""
|
||||
stmt ::= if_expr_lambda
|
||||
stmt ::= conditional_not_lambda
|
||||
stmt ::= ifstmtl
|
||||
|
||||
if_expr_lambda ::= expr jmp_false expr return_if_lambda
|
||||
return_stmt_lambda LAMBDA_MARKER
|
||||
conditional_not_lambda
|
||||
@@ -989,6 +952,10 @@ class Python37Parser(Python37BaseParser):
|
||||
|
||||
stmt ::= whileTruestmt
|
||||
ifelsestmt ::= testexpr c_stmts_opt JUMP_FORWARD else_suite _come_froms
|
||||
|
||||
_ifstmts_jumpl ::= c_stmts JUMP_BACK
|
||||
_ifstmts_jumpl ::= _ifstmts_jump
|
||||
ifstmtl ::= testexpr _ifstmts_jumpl
|
||||
"""
|
||||
|
||||
def p_loop_stmt3(self, args):
|
||||
@@ -1011,12 +978,20 @@ class Python37Parser(Python37BaseParser):
|
||||
whilestmt ::= setup_loop testexpr l_stmts_opt COME_FROM JUMP_BACK POP_BLOCK
|
||||
COME_FROM_LOOP
|
||||
|
||||
|
||||
whilestmt ::= setup_loop testexpr l_stmts_opt JUMP_BACK POP_BLOCK
|
||||
COME_FROM_LOOP
|
||||
|
||||
whilestmt ::= setup_loop testexpr returns POP_BLOCK
|
||||
COME_FROM_LOOP
|
||||
|
||||
# We can be missing a COME_FROM_LOOP if the "while" statement is nested inside an if/else
|
||||
# so after the POP_BLOCK we have a JUMP_FORWARD which forms the "else" portion of the "if"
|
||||
# This is undoubtedly some sort of JUMP optimization going on.
|
||||
|
||||
whilestmt ::= setup_loop testexpr l_stmts_opt JUMP_BACK come_froms
|
||||
POP_BLOCK
|
||||
|
||||
while1elsestmt ::= setup_loop l_stmts JUMP_BACK
|
||||
else_suitel
|
||||
|
||||
@@ -1024,11 +999,12 @@ class Python37Parser(Python37BaseParser):
|
||||
else_suitel COME_FROM_LOOP
|
||||
|
||||
whileTruestmt ::= setup_loop l_stmts_opt JUMP_BACK POP_BLOCK
|
||||
COME_FROM_LOOP
|
||||
_come_froms
|
||||
|
||||
# FIXME: Python 3.? starts adding branch optimization? Put this starting there.
|
||||
|
||||
while1stmt ::= setup_loop l_stmts COME_FROM_LOOP
|
||||
while1stmt ::= setup_loop l_stmts COME_FROM_LOOP JUMP_BACK POP_BLOCK COME_FROM_LOOP
|
||||
while1stmt ::= setup_loop l_stmts COME_FROM JUMP_BACK COME_FROM_LOOP
|
||||
|
||||
while1elsestmt ::= setup_loop l_stmts JUMP_BACK
|
||||
@@ -1039,26 +1015,105 @@ class Python37Parser(Python37BaseParser):
|
||||
COME_FROM_LOOP
|
||||
"""
|
||||
|
||||
def p_generator_exp3(self, args):
|
||||
def p_36misc(self, args):
|
||||
"""
|
||||
load_genexpr ::= LOAD_GENEXPR
|
||||
load_genexpr ::= BUILD_TUPLE_1 LOAD_GENEXPR LOAD_STR
|
||||
sstmt ::= sstmt RETURN_LAST
|
||||
|
||||
# 3.6 redoes how return_closure works. FIXME: Isolate to LOAD_CLOSURE
|
||||
return_closure ::= LOAD_CLOSURE DUP_TOP STORE_NAME RETURN_VALUE RETURN_LAST
|
||||
|
||||
for_block ::= l_stmts_opt come_from_loops JUMP_BACK
|
||||
come_from_loops ::= COME_FROM_LOOP*
|
||||
|
||||
whilestmt ::= setup_loop testexpr l_stmts_opt
|
||||
JUMP_BACK come_froms POP_BLOCK COME_FROM_LOOP
|
||||
whilestmt ::= setup_loop testexpr l_stmts_opt
|
||||
come_froms JUMP_BACK come_froms POP_BLOCK COME_FROM_LOOP
|
||||
|
||||
# 3.6 due to jump optimization, we sometimes add RETURN_END_IF where
|
||||
# RETURN_VALUE is meant. Specifcally this can happen in
|
||||
# ifelsestmt -> ...else_suite _. suite_stmts... (last) stmt
|
||||
return ::= ret_expr RETURN_END_IF
|
||||
return ::= ret_expr RETURN_VALUE COME_FROM
|
||||
return_stmt_lambda ::= ret_expr RETURN_VALUE_LAMBDA COME_FROM
|
||||
|
||||
# A COME_FROM is dropped off because of JUMP-to-JUMP optimization
|
||||
and ::= expr jmp_false expr
|
||||
and ::= expr jmp_false expr jmp_false
|
||||
|
||||
jf_cf ::= JUMP_FORWARD COME_FROM
|
||||
cf_jf_else ::= come_froms JUMP_FORWARD ELSE
|
||||
|
||||
conditional ::= expr jmp_false expr jf_cf expr COME_FROM
|
||||
|
||||
async_for_stmt ::= setup_loop expr
|
||||
GET_AITER
|
||||
LOAD_CONST YIELD_FROM SETUP_EXCEPT GET_ANEXT LOAD_CONST
|
||||
YIELD_FROM
|
||||
store
|
||||
POP_BLOCK JUMP_FORWARD COME_FROM_EXCEPT DUP_TOP
|
||||
LOAD_GLOBAL COMPARE_OP POP_JUMP_IF_FALSE
|
||||
POP_TOP POP_TOP POP_TOP POP_EXCEPT POP_BLOCK
|
||||
JUMP_ABSOLUTE END_FINALLY COME_FROM
|
||||
for_block POP_BLOCK
|
||||
COME_FROM_LOOP
|
||||
|
||||
# Adds a COME_FROM_ASYNC_WITH over 3.5
|
||||
# FIXME: remove corresponding rule for 3.5?
|
||||
|
||||
except_suite ::= c_stmts_opt COME_FROM POP_EXCEPT jump_except COME_FROM
|
||||
|
||||
jb_cfs ::= come_from_opt JUMP_BACK come_froms
|
||||
ifelsestmtl ::= testexpr c_stmts_opt jb_cfs else_suitel
|
||||
ifelsestmtl ::= testexpr c_stmts_opt cf_jf_else else_suitel
|
||||
|
||||
# In 3.6+, A sequence of statements ending in a RETURN can cause
|
||||
# JUMP_FORWARD END_FINALLY to be omitted from try middle
|
||||
|
||||
except_return ::= POP_TOP POP_TOP POP_TOP returns
|
||||
except_handler ::= JUMP_FORWARD COME_FROM_EXCEPT except_return
|
||||
|
||||
# Try middle following a returns
|
||||
except_handler36 ::= COME_FROM_EXCEPT except_stmts END_FINALLY
|
||||
|
||||
stmt ::= try_except36
|
||||
try_except36 ::= SETUP_EXCEPT returns except_handler36
|
||||
opt_come_from_except
|
||||
try_except36 ::= SETUP_EXCEPT suite_stmts
|
||||
try_except36 ::= SETUP_EXCEPT suite_stmts_opt POP_BLOCK
|
||||
except_handler36 come_from_opt
|
||||
|
||||
# 3.6 omits END_FINALLY sometimes
|
||||
except_handler36 ::= COME_FROM_EXCEPT except_stmts
|
||||
except_handler36 ::= JUMP_FORWARD COME_FROM_EXCEPT except_stmts
|
||||
except_handler ::= jmp_abs COME_FROM_EXCEPT except_stmts
|
||||
|
||||
stmt ::= tryfinally36
|
||||
tryfinally36 ::= SETUP_FINALLY returns
|
||||
COME_FROM_FINALLY suite_stmts
|
||||
tryfinally36 ::= SETUP_FINALLY returns
|
||||
COME_FROM_FINALLY suite_stmts_opt END_FINALLY
|
||||
except_suite_finalize ::= SETUP_FINALLY returns
|
||||
COME_FROM_FINALLY suite_stmts_opt END_FINALLY _jump
|
||||
|
||||
stmt ::= tryfinally_return_stmt
|
||||
tryfinally_return_stmt ::= SETUP_FINALLY suite_stmts_opt POP_BLOCK LOAD_CONST
|
||||
COME_FROM_FINALLY
|
||||
|
||||
compare_chained2 ::= expr COMPARE_OP come_froms JUMP_FORWARD
|
||||
"""
|
||||
|
||||
def p_expr3(self, args):
|
||||
def p_37misc(self, args):
|
||||
"""
|
||||
expr ::= conditionalnot
|
||||
conditionalnot ::= expr jmp_true expr jump_forward_else expr COME_FROM
|
||||
# long except clauses in a loop can sometimes cause a JUMP_BACK to turn into a
|
||||
# JUMP_FORWARD to a JUMP_BACK. And when this happens there is an additional
|
||||
# ELSE added to the except_suite. With better flow control perhaps we can
|
||||
# sort this out better.
|
||||
except_suite ::= c_stmts_opt POP_EXCEPT jump_except ELSE
|
||||
|
||||
# a JUMP_FORWARD to another JUMP_FORWARD can get turned into
|
||||
# a JUMP_ABSOLUTE with no COME_FROM
|
||||
conditional ::= expr jmp_false expr jump_absolute_else expr
|
||||
|
||||
# if_expr_true are for conditions which always evaluate true
|
||||
# There is dead or non-optional remnants of the condition code though,
|
||||
# and we use that to match on to reconstruct the source more accurately
|
||||
expr ::= if_expr_true
|
||||
if_expr_true ::= expr JUMP_FORWARD expr COME_FROM
|
||||
# FIXME: the below is to work around test_grammar expecting a "call" to be
|
||||
# on the LHS because it is also somewhere on in a rule.
|
||||
call ::= expr CALL_METHOD_0
|
||||
"""
|
||||
|
||||
def customize_grammar_rules(self, tokens, customize):
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright (c) 2016-2017, 2019 Rocky Bernstein
|
||||
# Copyright (c) 2016-2017, 2019-2020 Rocky Bernstein
|
||||
"""
|
||||
Python 3.7 base code. We keep non-custom-generated grammar rules out of this file.
|
||||
"""
|
||||
@@ -581,6 +581,18 @@ class Python37BaseParser(PythonParser):
|
||||
elif opname == "LOAD_LISTCOMP":
|
||||
self.add_unique_rule("expr ::= listcomp", opname, token.attr, customize)
|
||||
custom_ops_processed.add(opname)
|
||||
elif opname == "LOAD_NAME":
|
||||
if token.attr == "__annotations__" and "SETUP_ANNOTATIONS" in self.seen_ops:
|
||||
token.kind = "LOAD_ANNOTATION"
|
||||
self.addRule(
|
||||
"""
|
||||
stmt ::= SETUP_ANNOTATIONS
|
||||
stmt ::= ann_assign
|
||||
ann_assign ::= expr LOAD_ANNOTATION LOAD_STR STORE_SUBSCR
|
||||
""",
|
||||
nop_func,
|
||||
)
|
||||
pass
|
||||
elif opname == "LOAD_SETCOMP":
|
||||
# Should this be generalized and put under MAKE_FUNCTION?
|
||||
if has_get_iter_call_function1:
|
||||
@@ -962,6 +974,7 @@ class Python37BaseParser(PythonParser):
|
||||
pass
|
||||
|
||||
self.check_reduce["and"] = "AST"
|
||||
self.check_reduce["annotate_tuple"] = "noAST"
|
||||
self.check_reduce["aug_assign1"] = "AST"
|
||||
self.check_reduce["aug_assign2"] = "AST"
|
||||
self.check_reduce["while1stmt"] = "noAST"
|
||||
@@ -972,7 +985,7 @@ class Python37BaseParser(PythonParser):
|
||||
self.check_reduce["iflaststmtl"] = "AST"
|
||||
self.check_reduce["ifstmt"] = "AST"
|
||||
self.check_reduce["ifstmtl"] = "AST"
|
||||
self.check_reduce["annotate_tuple"] = "noAST"
|
||||
self.check_reduce["import_from37"] = "AST"
|
||||
self.check_reduce["or"] = "tokens"
|
||||
|
||||
# FIXME: remove parser errors caused by the below
|
||||
@@ -1103,8 +1116,13 @@ class Python37BaseParser(PythonParser):
|
||||
# FIXME: This is a cheap test. Should we do something with an AST like we
|
||||
# do with "and"?
|
||||
# "or"s with constants like this will have "COME_FROM" at the end
|
||||
return tokens[last] in ("LOAD_ASSERT", "LOAD_STR", "LOAD_CODE", "LOAD_CONST",
|
||||
"RAISE_VARARGS_1")
|
||||
return tokens[last] in (
|
||||
"LOAD_ASSERT",
|
||||
"LOAD_STR",
|
||||
"LOAD_CODE",
|
||||
"LOAD_CONST",
|
||||
"RAISE_VARARGS_1",
|
||||
)
|
||||
elif lhs == "while1elsestmt":
|
||||
|
||||
if last == n:
|
||||
@@ -1143,7 +1161,7 @@ class Python37BaseParser(PythonParser):
|
||||
for i in range(cfl - 1, first, -1):
|
||||
if tokens[i] != "POP_BLOCK":
|
||||
break
|
||||
if tokens[i].kind not in ("JUMP_BACK", "RETURN_VALUE"):
|
||||
if tokens[i].kind not in ("JUMP_BACK", "RETURN_VALUE", "RAISE_VARARGS_1"):
|
||||
if not tokens[i].kind.startswith("COME_FROM"):
|
||||
return True
|
||||
|
||||
@@ -1156,9 +1174,8 @@ class Python37BaseParser(PythonParser):
|
||||
last -= 1
|
||||
offset = tokens[last].off2int()
|
||||
assert tokens[first] == "SETUP_LOOP"
|
||||
if offset != tokens[first].attr:
|
||||
return True
|
||||
return False
|
||||
# SETUP_LOOP location must jump either to the last token or the token after the last one
|
||||
return tokens[first].attr not in (offset, offset + 2)
|
||||
elif lhs == "_ifstmts_jump" and len(rule[1]) > 1 and ast:
|
||||
come_froms = ast[-1]
|
||||
# Make sure all of the "come froms" offset at the
|
||||
@@ -1192,6 +1209,10 @@ class Python37BaseParser(PythonParser):
|
||||
return False
|
||||
|
||||
if isinstance(come_froms, Token):
|
||||
if tokens[pop_jump_index].attr < tokens[pop_jump_index].offset and ast[0] != "pass":
|
||||
# This is a jump backwards to a loop. All bets are off here when there the
|
||||
# unless statement is "pass" which has no instructions associated with it.
|
||||
return False
|
||||
return (
|
||||
come_froms.attr is not None
|
||||
and tokens[pop_jump_index].offset > come_froms.attr
|
||||
@@ -1210,7 +1231,7 @@ class Python37BaseParser(PythonParser):
|
||||
if last == n:
|
||||
last -= 1
|
||||
pass
|
||||
if (tokens[last].attr and isinstance(tokens[last].attr, int)):
|
||||
if tokens[last].attr and isinstance(tokens[last].attr, int):
|
||||
return tokens[first].offset < tokens[last].attr
|
||||
pass
|
||||
|
||||
@@ -1225,7 +1246,14 @@ class Python37BaseParser(PythonParser):
|
||||
for i in range(first, l):
|
||||
t = tokens[i]
|
||||
if t.kind == "POP_JUMP_IF_FALSE":
|
||||
if t.attr > last_offset:
|
||||
pjif_target = t.attr
|
||||
if pjif_target > last_offset:
|
||||
# In come cases, where we have long bytecode, a
|
||||
# "POP_JUMP_IF_FALSE" offset might be too
|
||||
# large for the instruction; so instead it
|
||||
# jumps to a JUMP_FORWARD. Allow that here.
|
||||
if tokens[l] == "JUMP_FORWARD":
|
||||
return tokens[l].attr != pjif_target
|
||||
return True
|
||||
pass
|
||||
pass
|
||||
@@ -1244,7 +1272,11 @@ class Python37BaseParser(PythonParser):
|
||||
if last == n:
|
||||
last -= 1
|
||||
jmp_target = test[1][0].attr
|
||||
if tokens[first].off2int() <= jmp_target < tokens[last].off2int():
|
||||
if (
|
||||
tokens[first].off2int()
|
||||
<= jmp_target
|
||||
< tokens[last].off2int()
|
||||
):
|
||||
return True
|
||||
# jmp_target less than tokens[first] is okay - is to a loop
|
||||
# jmp_target equal tokens[last] is also okay: normal non-optimized non-loop jump
|
||||
@@ -1279,7 +1311,11 @@ class Python37BaseParser(PythonParser):
|
||||
# jmp_target less than tokens[first] is okay - is to a loop
|
||||
# jmp_target equal tokens[last] is also okay: normal non-optimized non-loop jump
|
||||
|
||||
if (last + 1) < n and tokens[last - 1] != "JUMP_BACK" and tokens[last + 1] == "COME_FROM_LOOP":
|
||||
if (
|
||||
(last + 1) < n
|
||||
and tokens[last - 1] != "JUMP_BACK"
|
||||
and tokens[last + 1] == "COME_FROM_LOOP"
|
||||
):
|
||||
# iflastsmtl is not at the end of a loop, but jumped outside of loop. No good.
|
||||
# FIXME: check that tokens[last] == "POP_BLOCK"? Or allow for it not to appear?
|
||||
return True
|
||||
@@ -1323,6 +1359,36 @@ class Python37BaseParser(PythonParser):
|
||||
"_come_froms",
|
||||
),
|
||||
),
|
||||
(
|
||||
"ifelsestmt",
|
||||
(
|
||||
"testexpr",
|
||||
"c_stmts_opt",
|
||||
"jump_forward_else",
|
||||
"else_suite",
|
||||
'\\e__come_froms'
|
||||
),
|
||||
),
|
||||
(
|
||||
"ifelsestmt",
|
||||
(
|
||||
"testexpr",
|
||||
"c_stmts_opt",
|
||||
"jf_cfs",
|
||||
"else_suite",
|
||||
'\\e_opt_come_from_except',
|
||||
),
|
||||
),
|
||||
(
|
||||
"ifelsestmt",
|
||||
(
|
||||
"testexpr",
|
||||
"c_stmts_opt",
|
||||
"come_froms",
|
||||
"else_suite",
|
||||
'come_froms',
|
||||
),
|
||||
),
|
||||
(
|
||||
"ifelsestmt",
|
||||
(
|
||||
@@ -1345,7 +1411,8 @@ class Python37BaseParser(PythonParser):
|
||||
if come_froms == "opt_come_from_except" and len(come_froms) > 0:
|
||||
come_froms = come_froms[0]
|
||||
if not isinstance(come_froms, Token):
|
||||
return tokens[first].offset > come_froms[-1].attr
|
||||
if len(come_froms):
|
||||
return tokens[first].offset > come_froms[-1].attr
|
||||
elif tokens[first].offset > come_froms.attr:
|
||||
return True
|
||||
|
||||
@@ -1363,20 +1430,46 @@ class Python37BaseParser(PythonParser):
|
||||
|
||||
# Check that the condition portion of the "if"
|
||||
# jumps to the "else" part.
|
||||
# Compare with parse30.py of uncompyle6
|
||||
if testexpr[0] in ("testtrue", "testfalse"):
|
||||
test = testexpr[0]
|
||||
|
||||
else_suite = ast[3]
|
||||
assert else_suite == "else_suite"
|
||||
|
||||
if len(test) > 1 and test[1].kind.startswith("jmp_"):
|
||||
if last == n:
|
||||
last -= 1
|
||||
jmp = test[1]
|
||||
jmp_target = jmp[0].attr
|
||||
|
||||
# FIXME: the jump inside "else" check below should be added.
|
||||
#
|
||||
# add this until we can find out what's wrong with
|
||||
# not being able to parse:
|
||||
# if a and b or c:
|
||||
# x = 1
|
||||
# else:
|
||||
# x = 2
|
||||
|
||||
# FIXME: add this
|
||||
# if jmp_target < else_suite.first_child().off2int():
|
||||
# return True
|
||||
|
||||
if tokens[first].off2int() > jmp_target:
|
||||
return True
|
||||
|
||||
return (jmp_target > tokens[last].off2int()) and tokens[
|
||||
last
|
||||
] != "JUMP_FORWARD"
|
||||
|
||||
return False
|
||||
elif lhs == "import_from37":
|
||||
importlist37 = ast[3]
|
||||
alias37 = importlist37[0]
|
||||
if importlist37 == "importlist37" and alias37 == "alias37":
|
||||
store = alias37[1]
|
||||
assert store == "store"
|
||||
return alias37[0].attr != store[0].attr
|
||||
return False
|
||||
|
||||
return False
|
||||
|
@@ -358,6 +358,10 @@ class Scanner37Base(Scanner):
                 # other parts like n_LOAD_CONST in pysource.py for example.
                 pattr = const
                 pass
+            elif opname == "IMPORT_NAME":
+                if "." in inst.argval:
+                    opname = "IMPORT_NAME_ATTR"
+                pass
             elif opname in ("MAKE_FUNCTION", "MAKE_CLOSURE"):
                 flags = argval
                 opname = "MAKE_FUNCTION_%d" % (flags)
@@ -888,6 +892,7 @@ class Scanner37Base(Scanner):
             elif op in self.setup_opts_no_loop:
                 count_SETUP_ += 1

+
 if __name__ == "__main__":
     from uncompyle6 import PYTHON_VERSION
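The scanner change above is small but central to the new import handling: when an `IMPORT_NAME` instruction's argument contains a dot, the scanner renames it to the pseudo opcode `IMPORT_NAME_ATTR` so the grammar can treat dotted imports separately. A minimal, standalone sketch of that renaming (illustrative only; it does not use uncompyle6's real instruction or Token types):

```python
# Sketch of the opcode-renaming idea used above. "argval" stands in for the
# instruction's argument value, e.g. the module path of an import.
def pseudo_opname(opname, argval):
    if opname == "IMPORT_NAME" and "." in argval:
        return "IMPORT_NAME_ATTR"   # dotted import such as "os.path"
    return opname

assert pseudo_opname("IMPORT_NAME", "os.path") == "IMPORT_NAME_ATTR"
assert pseudo_opname("IMPORT_NAME", "os") == "IMPORT_NAME"
```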
@@ -1,4 +1,4 @@
-# Copyright (c) 2017-2019 Rocky Bernstein
+# Copyright (c) 2017-2020 Rocky Bernstein
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -230,6 +230,7 @@ TABLE_DIRECT = {
                         (1, 100), (2, 100) ),

     'IMPORT_FROM':      ( '%{pattr}', ),
+    'IMPORT_NAME_ATTR': ( '%{pattr}', ),
     'attribute':        ( '%c.%[1]{pattr}',
                           (0, 'expr')),
     'LOAD_STR':         ( '%{pattr}', ),
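The `'%{pattr}'` entries in TABLE_DIRECT are format templates: when a node is rendered, `%{pattr}` is replaced by the node's `pattr` attribute, which for `IMPORT_NAME_ATTR` is the dotted module name. Below is a rough standalone sketch of that substitution; it is not uncompyle6's actual template engine, and `FakeNode` is an illustrative stand-in:

```python
import re

class FakeNode:
    """Stand-in for a parse-tree token; only attribute lookup matters here."""
    def __init__(self, pattr):
        self.pattr = pattr

def render(template, node):
    # Handle only the plain "%{attr}" form used by the TABLE_DIRECT entries above.
    return re.sub(r"%\{(\w+)\}", lambda m: str(getattr(node, m.group(1))), template)

print(render("%{pattr}", FakeNode("os.path")))   # -> os.path
```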
@@ -20,8 +20,12 @@ from uncompyle6.semantics.consts import TABLE_DIRECT
|
||||
|
||||
from xdis.code import iscode
|
||||
from uncompyle6.scanner import Code
|
||||
from uncompyle6.semantics.helper import gen_function_parens_adjust
|
||||
from uncompyle6.semantics.make_function import make_function3_annotate
|
||||
from uncompyle6.semantics.helper import (
|
||||
find_code_node,
|
||||
gen_function_parens_adjust,
|
||||
)
|
||||
|
||||
from uncompyle6.semantics.make_function3 import make_function3_annotate
|
||||
from uncompyle6.semantics.customize35 import customize_for_version35
|
||||
from uncompyle6.semantics.customize36 import customize_for_version36
|
||||
from uncompyle6.semantics.customize37 import customize_for_version37
|
||||
@@ -139,9 +143,14 @@ def customize_for_version3(self, version):
|
||||
list_ifs.append([1])
|
||||
n = n[2]
|
||||
pass
|
||||
elif n == "list_if37":
|
||||
list_ifs.append(n)
|
||||
n = n[-1]
|
||||
pass
|
||||
pass
|
||||
|
||||
assert n == "lc_body", ast
|
||||
|
||||
self.preorder(n[0])
|
||||
|
||||
# FIXME: add indentation around "for"'s and "in"'s
|
||||
@@ -158,6 +167,7 @@ def customize_for_version3(self, version):
|
||||
self.listcomp_closure3 = listcomp_closure3
|
||||
|
||||
def n_classdef3(node):
|
||||
|
||||
# class definition ('class X(A,B,C):')
|
||||
cclass = self.currentclass
|
||||
|
||||
@@ -228,10 +238,10 @@ def customize_for_version3(self, version):
|
||||
# Python 3.3 classes with closures work like this.
|
||||
# Note have to test before 3.2 case because
|
||||
# index -2 also has an attr.
|
||||
subclass_code = load_closure[-3].attr
|
||||
subclass_code = find_code_node(load_closure, -3).attr
|
||||
elif hasattr(load_closure[-2], "attr"):
|
||||
# Python 3.2 works like this
|
||||
subclass_code = load_closure[-2].attr
|
||||
subclass_code = find_code_node(load_closure, -2).attr
|
||||
else:
|
||||
raise "Internal Error n_classdef: cannot find class body"
|
||||
if hasattr(build_class[3], "__len__"):
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright (c) 2019 by Rocky Bernstein
|
||||
# Copyright (c) 2019-2020 by Rocky Bernstein
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -15,61 +15,153 @@
|
||||
"""Isolate Python 3.7 version-specific semantic actions here.
|
||||
"""
|
||||
|
||||
from uncompyle6.semantics.consts import PRECEDENCE, TABLE_DIRECT
|
||||
from uncompyle6.semantics.consts import (
|
||||
PRECEDENCE,
|
||||
TABLE_DIRECT,
|
||||
maxint,
|
||||
)
|
||||
|
||||
def customize_for_version37(self, version):
|
||||
########################
|
||||
# Python 3.7+ changes
|
||||
#######################
|
||||
|
||||
PRECEDENCE['attribute37'] = 2
|
||||
PRECEDENCE['if_exp_37a'] = 28
|
||||
PRECEDENCE['if_exp_37b'] = 28
|
||||
PRECEDENCE["attribute37"] = 2
|
||||
PRECEDENCE["call_ex"] = 1
|
||||
PRECEDENCE["call_ex_kw"] = 1
|
||||
PRECEDENCE["call_ex_kw2"] = 1
|
||||
PRECEDENCE["call_ex_kw3"] = 1
|
||||
PRECEDENCE["call_ex_kw4"] = 1
|
||||
PRECEDENCE["call_kw"] = 0
|
||||
PRECEDENCE["call_kw36"] = 1
|
||||
PRECEDENCE["formatted_value1"] = 100
|
||||
PRECEDENCE["if_exp_37a"] = 28
|
||||
PRECEDENCE["if_exp_37b"] = 28
|
||||
PRECEDENCE["unmap_dict"] = 0
|
||||
|
||||
TABLE_DIRECT.update({
|
||||
'and_not': ( '%c and not %c',
|
||||
(0, 'expr'), (2, 'expr') ),
|
||||
'async_forelse_stmt': (
|
||||
'%|async for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n',
|
||||
(7, 'store'), (1, 'expr'), (17, 'for_block'), (25, 'else_suite') ),
|
||||
'async_for_stmt': (
|
||||
'%|async for %c in %c:\n%+%c%-\n\n',
|
||||
(7, 'store'), (1, 'expr'), (17, 'for_block')),
|
||||
'async_for_stmt37': (
|
||||
'%|async for %c in %c:\n%+%c%-%-\n\n',
|
||||
(7, 'store'), (1, 'expr'), (16, 'for_block') ),
|
||||
'attribute37': ( '%c.%[1]{pattr}', 0 ),
|
||||
'compare_chained1a_37': (
|
||||
' %[3]{pattr.replace("-", " ")} %p %p',
|
||||
(0, 19), (-4, 19)),
|
||||
'compare_chained1_false_37': (
|
||||
' %[3]{pattr.replace("-", " ")} %p %p',
|
||||
(0, 19), (-4, 19)),
|
||||
'compare_chained2_false_37': (
|
||||
' %[3]{pattr.replace("-", " ")} %p %p',
|
||||
(0, 19), (-5, 19)),
|
||||
'compare_chained1b_37': (
|
||||
' %[3]{pattr.replace("-", " ")} %p %p',
|
||||
(0, 19), (-4, 19)),
|
||||
'compare_chained1c_37': (
|
||||
' %[3]{pattr.replace("-", " ")} %p %p',
|
||||
(0, 19), (-2, 19)),
|
||||
'compare_chained2a_37': (
|
||||
'%[1]{pattr.replace("-", " ")} %p',
|
||||
(0, 19) ),
|
||||
'compare_chained2b_37': (
|
||||
'%[1]{pattr.replace("-", " ")} %p',
|
||||
(0, 19) ),
|
||||
'compare_chained2a_false_37': (
|
||||
'%[1]{pattr.replace("-", " ")} %p',
|
||||
(0, 19 ) ),
|
||||
'compare_chained2c_37': (
|
||||
'%[3]{pattr.replace("-", " ")} %p %p', (0, 19), (6, 19) ),
|
||||
'if_exp_37a': ( '%p if %p else %p', (1, 'expr', 27), (0, 27), (4, 'expr', 27) ),
|
||||
'if_exp_37b': ( '%p if %p else %p', (2, 'expr', 27), (0, 'expr', 27), (5, 'expr', 27) ),
|
||||
'testfalse_not_or': ( "not %c or %c",
|
||||
(0, "expr"),
|
||||
(2, "expr") ),
|
||||
'testfalse_not_and': ( "not (%c)", 0 ),
|
||||
TABLE_DIRECT.update(
|
||||
{
|
||||
"and_not": ("%c and not %c", (0, "expr"), (2, "expr")),
|
||||
"ann_assign": (
|
||||
"%|%[2]{attr}: %c\n", 0,
|
||||
),
|
||||
"ann_assign_init": (
|
||||
"%|%[2]{attr}: %c = %c\n", 0, 1,
|
||||
),
|
||||
"async_for_stmt": (
|
||||
"%|async for %c in %c:\n%+%c%-\n\n",
|
||||
(7, "store"),
|
||||
(1, "expr"),
|
||||
(17, "for_block"),
|
||||
),
|
||||
"async_for_stmt36": (
|
||||
"%|async for %c in %c:\n%+%c%-%-\n\n",
|
||||
(9, "store"),
|
||||
(1, "expr"),
|
||||
(18, "for_block"),
|
||||
),
|
||||
"async_for_stmt37": (
|
||||
"%|async for %c in %c:\n%+%c%-%-\n\n",
|
||||
(7, "store"),
|
||||
(1, "expr"),
|
||||
(16, "for_block"),
|
||||
),
|
||||
"async_with_stmt": ("%|async with %c:\n%+%c%-", (0, "expr"), 7),
|
||||
"async_with_as_stmt": (
|
||||
"%|async with %c as %c:\n%+%c%-",
|
||||
(0, "expr"),
|
||||
(6, "store"),
|
||||
7,
|
||||
),
|
||||
"async_forelse_stmt": (
|
||||
"%|async for %c in %c:\n%+%c%-%|else:\n%+%c%-\n\n",
|
||||
(7, "store"),
|
||||
(1, "expr"),
|
||||
(17, "for_block"),
|
||||
(25, "else_suite"),
|
||||
),
|
||||
"attribute37": ("%c.%[1]{pattr}", 0),
|
||||
"attributes37": ("%[0]{pattr} import %c",
|
||||
(0, "IMPORT_NAME_ATTR"),
|
||||
(1, "IMPORT_FROM")),
|
||||
"await_expr": ("await %c", 0),
|
||||
"await_stmt": ("%|%c\n", 0),
|
||||
"call_ex": ("%c(%p)", (0, "expr"), (1, 100)),
|
||||
"compare_chained1a_37": (
|
||||
' %[3]{pattr.replace("-", " ")} %p %p',
|
||||
(0, 19),
|
||||
(-4, 19),
|
||||
),
|
||||
"compare_chained1_false_37": (
|
||||
' %[3]{pattr.replace("-", " ")} %p %p',
|
||||
(0, 19),
|
||||
(-4, 19),
|
||||
),
|
||||
"compare_chained2_false_37": (
|
||||
' %[3]{pattr.replace("-", " ")} %p %p',
|
||||
(0, 19),
|
||||
(-5, 19),
|
||||
),
|
||||
"compare_chained1b_false_37": (
|
||||
' %[3]{pattr.replace("-", " ")} %p %p',
|
||||
(0, 19),
|
||||
(-4, 19),
|
||||
),
|
||||
"compare_chained1c_37": (
|
||||
' %[3]{pattr.replace("-", " ")} %p %p',
|
||||
(0, 19),
|
||||
(-2, 19),
|
||||
),
|
||||
"compare_chained2a_37": ('%[1]{pattr.replace("-", " ")} %p', (0, 19)),
|
||||
"compare_chained2b_false_37": ('%[1]{pattr.replace("-", " ")} %p', (0, 19)),
|
||||
"compare_chained2a_false_37": ('%[1]{pattr.replace("-", " ")} %p', (0, 19)),
|
||||
"compare_chained2c_37": (
|
||||
'%[3]{pattr.replace("-", " ")} %p %p',
|
||||
(0, 19),
|
||||
(6, 19),
|
||||
),
|
||||
"except_return": ("%|except:\n%+%c%-", 3),
|
||||
"if_exp_37a": (
|
||||
"%p if %p else %p",
|
||||
(1, "expr", 27),
|
||||
(0, 27),
|
||||
(4, "expr", 27),
|
||||
),
|
||||
"if_exp_37b": (
|
||||
"%p if %p else %p",
|
||||
(2, "expr", 27),
|
||||
(0, "expr", 27),
|
||||
(5, "expr", 27),
|
||||
),
|
||||
"ifstmtl": ("%|if %c:\n%+%c%-", (0, "testexpr"), (1, "_ifstmts_jumpl")),
|
||||
'import_as37': ( '%|import %c as %c\n', 2, -2),
|
||||
'import_from37': ( '%|from %[2]{pattr} import %c\n',
|
||||
(3, 'importlist37') ),
|
||||
|
||||
})
|
||||
"importattr37": ("%c", (0, "IMPORT_NAME_ATTR")),
|
||||
"importlist37": ("%C", (0, maxint, ", ")),
|
||||
"list_if37": (" if %p%c", (0, 27), 1),
|
||||
"list_if37_not": (" if not %p%c", (0, 27), 1),
|
||||
"testfalse_not_or": ("not %c or %c", (0, "expr"), (2, "expr")),
|
||||
"testfalse_not_and": ("not (%c)", 0),
|
||||
"try_except36": ("%|try:\n%+%c%-%c\n\n", 1, -2),
|
||||
"tryfinally36": ("%|try:\n%+%c%-%|finally:\n%+%c%-\n\n", (1, "returns"), 3),
|
||||
"unmap_dict": ("{**%C}", (0, -1, ", **")),
|
||||
"unpack_list": ("*%c", (0, "list")),
|
||||
"yield_from": ("yield from %c", (0, "expr")),
|
||||
}
|
||||
)
|
||||
|
||||
def n_importlist37(node):
|
||||
if len(node) == 1:
|
||||
self.default(node)
|
||||
return
|
||||
n = len(node) - 1
|
||||
for i in range(n, -1, -1):
|
||||
if node[i] != "ROT_TWO":
|
||||
break
|
||||
self.template_engine(("%C", (0, i + 1, ', ')), node)
|
||||
self.prune()
|
||||
return
|
||||
|
||||
self.n_importlist37 = n_importlist37
|
||||
|
@@ -1,5 +1,6 @@
|
||||
import sys
|
||||
|
||||
from xdis.code import iscode
|
||||
from uncompyle6.parsers.treenode import SyntaxTree
|
||||
|
||||
from uncompyle6 import PYTHON3
|
||||
@@ -16,6 +17,23 @@ read_global_ops = frozenset(('STORE_GLOBAL', 'DELETE_GLOBAL'))
|
||||
# NOTE: we also need to check that the variable name is a free variable, not a cell variable.
|
||||
nonglobal_ops = frozenset(('STORE_DEREF', 'DELETE_DEREF'))
|
||||
|
||||
def escape_string(s, quotes=('"', "'", '"""', "'''")):
|
||||
quote = None
|
||||
for q in quotes:
|
||||
if s.find(q) == -1:
|
||||
quote = q
|
||||
break
|
||||
pass
|
||||
if quote is None:
|
||||
quote = '"""'
|
||||
s = s.replace('"""', '\\"""')
|
||||
|
||||
for (orig, replace) in (('\t', '\\t'),
|
||||
('\n', '\\n'),
|
||||
('\r', '\\r')):
|
||||
s = s.replace(orig, replace)
|
||||
return "%s%s%s" % (quote, s, quote)
|
||||
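A brief usage note for the relocated escape_string() defined just above (the calls assume that function is in scope, exactly as written in this diff):

```python
# escape_string() wraps the string in the first quote style not already
# present in it and escapes tab, newline, and carriage return.
print(escape_string('He said "hi"'))   # -> 'He said "hi"'   (single quotes chosen)
print(escape_string("two\nlines"))     # -> "two\nlines"     (newline escaped)
```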
|
||||
# FIXME: this and find_globals could be paramaterized with one of the
|
||||
# above global ops
|
||||
def find_all_globals(node, globs):
|
||||
@@ -27,6 +45,30 @@ def find_all_globals(node, globs):
|
||||
globs.add(n.pattr)
|
||||
return globs
|
||||
|
||||
# def find_globals(node, globs, global_ops=mkfunc_globals):
|
||||
# """Find globals in this statement."""
|
||||
# for n in node:
|
||||
# # print("XXX", n.kind, global_ops)
|
||||
# if isinstance(n, SyntaxTree):
|
||||
# # FIXME: do I need a caser for n.kind="mkfunc"?
|
||||
# if n.kind in ("if_expr_lambda", "return_lambda"):
|
||||
# globs = find_globals(n, globs, mklambda_globals)
|
||||
# else:
|
||||
# globs = find_globals(n, globs, global_ops)
|
||||
# elif n.kind in frozenset(global_ops):
|
||||
# globs.add(n.pattr)
|
||||
# return globs
|
||||
|
||||
def find_code_node(node, start):
|
||||
for i in range(-start, len(node) + 1):
|
||||
if node[-i].kind == "LOAD_CODE":
|
||||
code_node = node[-i]
|
||||
assert iscode(code_node.attr)
|
||||
return code_node
|
||||
pass
|
||||
assert False, "did not find code node starting at %d in %s" % (start, node)
|
||||
|
||||
|
||||
def find_globals_and_nonlocals(node, globs, nonlocals, code, version):
|
||||
"""search a node of parse tree to find variable names that need a
|
||||
either 'global' or 'nonlocal' statements added."""
|
||||
@@ -44,20 +86,6 @@ def find_globals_and_nonlocals(node, globs, nonlocals, code, version):
|
||||
nonlocals.add(n.pattr)
|
||||
return globs, nonlocals
|
||||
|
||||
# def find_globals(node, globs, global_ops=mkfunc_globals):
|
||||
# """Find globals in this statement."""
|
||||
# for n in node:
|
||||
# # print("XXX", n.kind, global_ops)
|
||||
# if isinstance(n, SyntaxTree):
|
||||
# # FIXME: do I need a caser for n.kind="mkfunc"?
|
||||
# if n.kind in ("if_expr_lambda", "return_lambda"):
|
||||
# globs = find_globals(n, globs, mklambda_globals)
|
||||
# else:
|
||||
# globs = find_globals(n, globs, global_ops)
|
||||
# elif n.kind in frozenset(global_ops):
|
||||
# globs.add(n.pattr)
|
||||
# return globs
|
||||
|
||||
def find_none(node):
|
||||
for n in node:
|
||||
if isinstance(n, SyntaxTree):
|
||||
@@ -68,35 +96,47 @@ def find_none(node):
|
||||
return True
|
||||
return False
|
||||
|
||||
def escape_string(str, quotes=('"', "'", '"""', "'''")):
|
||||
quote = None
|
||||
for q in quotes:
|
||||
if str.find(q) == -1:
|
||||
quote = q
|
||||
break
|
||||
def flatten_list(node):
|
||||
"""
|
||||
List of expressions may be nested in groups of 32 and 1024
|
||||
items. flatten that out and return the list
|
||||
"""
|
||||
flat_elems = []
|
||||
for elem in node:
|
||||
if elem == 'expr1024':
|
||||
for subelem in elem:
|
||||
assert subelem == 'expr32'
|
||||
for subsubelem in subelem:
|
||||
flat_elems.append(subsubelem)
|
||||
elif elem == 'expr32':
|
||||
for subelem in elem:
|
||||
assert subelem == 'expr'
|
||||
flat_elems.append(subelem)
|
||||
else:
|
||||
flat_elems.append(elem)
|
||||
pass
|
||||
pass
|
||||
if quote is None:
|
||||
quote = '"""'
|
||||
str = str.replace('"""', '\\"""')
|
||||
return flat_elems
|
||||
|
||||
for (orig, replace) in (('\t', '\\t'),
|
||||
('\n', '\\n'),
|
||||
('\r', '\\r')):
|
||||
str = str.replace(orig, replace)
|
||||
return "%s%s%s" % (quote, str, quote)
|
||||
# Note: this is only used in Python > 3.0
# Should move this somewhere more specific?
def gen_function_parens_adjust(mapping_key, node):
    """If we can avoid the outer parenthesis
    of a generator function, set the node key to
    'call_generator' and the caller will do the default
    action on that. Otherwise we do nothing.
    """
    if mapping_key.kind != 'CALL_FUNCTION_1':
        return

    args_node = node[-2]
    if args_node == 'pos_arg':
        assert args_node[0] == 'expr'
        n = args_node[0][0]
        if n == 'generator_exp':
            node.kind = 'call_generator'
        pass
    return


def strip_quotes(str):
    if str.startswith("'''") and str.endswith("'''"):
        str = str[3:-3]
    elif str.startswith('"""') and str.endswith('"""'):
        str = str[3:-3]
    elif str.startswith("'") and str.endswith("'"):
        str = str[1:-1]
    elif str.startswith('"') and str.endswith('"'):
        str = str[1:-1]
    return str
|
||||
|
||||
def print_docstring(self, indent, docstring):
|
||||
quote = '"""'
|
||||
@@ -173,48 +213,18 @@ def print_docstring(self, indent, docstring):
|
||||
self.println(lines[-1], quote)
|
||||
return True
|
||||
|
||||
|
||||
def flatten_list(node):
    """
    List of expressions may be nested in groups of 32 and 1024
    items. flatten that out and return the list
    """
    flat_elems = []
    for elem in node:
        if elem == 'expr1024':
            for subelem in elem:
                assert subelem == 'expr32'
                for subsubelem in subelem:
                    flat_elems.append(subsubelem)
        elif elem == 'expr32':
            for subelem in elem:
                assert subelem == 'expr'
                flat_elems.append(subelem)
        else:
            flat_elems.append(elem)
        pass
    return flat_elems


def strip_quotes(s):
    if s.startswith("'''") and s.endswith("'''"):
        s = s[3:-3]
    elif s.startswith('"""') and s.endswith('"""'):
        s = s[3:-3]
    elif s.startswith("'") and s.endswith("'"):
        s = s[1:-1]
    elif s.startswith('"') and s.endswith('"'):
        s = s[1:-1]
        pass
    return s
|
||||
|
||||
# Note: this is only used in Python > 3.0
|
||||
# Should move this somewhere more specific?
|
||||
def gen_function_parens_adjust(mapping_key, node):
|
||||
"""If we can avoid the outer parenthesis
|
||||
of a generator function, set the node key to
|
||||
'call_generator' and the caller will do the default
|
||||
action on that. Otherwise we do nothing.
|
||||
"""
|
||||
if mapping_key.kind != 'CALL_FUNCTION_1':
|
||||
return
|
||||
|
||||
args_node = node[-2]
|
||||
if args_node == 'pos_arg':
|
||||
assert args_node[0] == 'expr'
|
||||
n = args_node[0][0]
|
||||
if n == 'generator_exp':
|
||||
node.kind = 'call_generator'
|
||||
pass
|
||||
return
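For context, a source-level example (mine, not from the commit) of what gen_function_parens_adjust is after: when a generator expression is the sole call argument, Python lets the call's parentheses double as the genexp's, so the first form below is preferred over the second.

total = sum(x * x for x in range(10))     # emitted when the node becomes 'call_generator'
total = sum((x * x for x in range(10)))   # what would be emitted otherwise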
|
||||
|
||||
|
||||
# if __name__ == '__main__':
|
||||
|
207
uncompyle6/semantics/make_function2.py
Normal file
@@ -0,0 +1,207 @@
|
||||
# Copyright (c) 2015-2019 by Rocky Bernstein
|
||||
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""
|
||||
All the crazy things we have to do to handle Python functions in Python before 3.0.
|
||||
The saga of changes continues in 3.0 and above and in other files.
|
||||
"""
|
||||
from xdis.code import iscode, code_has_star_arg, code_has_star_star_arg
|
||||
from uncompyle6.scanner import Code
|
||||
from uncompyle6.parsers.treenode import SyntaxTree
|
||||
from uncompyle6 import PYTHON3
|
||||
from uncompyle6.semantics.parser_error import ParserError
|
||||
from uncompyle6.parser import ParserError as ParserError2
|
||||
from uncompyle6.semantics.helper import (
|
||||
print_docstring,
|
||||
find_all_globals,
|
||||
find_globals_and_nonlocals,
|
||||
find_none,
|
||||
)
|
||||
|
||||
if PYTHON3:
|
||||
from itertools import zip_longest
|
||||
else:
|
||||
from itertools import izip_longest as zip_longest
|
||||
|
||||
from uncompyle6.show import maybe_show_tree_param_default
|
||||
|
||||
def make_function2(self, node, is_lambda, nested=1, code_node=None):
|
||||
"""
|
||||
Dump function definition, doc string, and function body.
|
||||
This code is specialized for Python 2.
|
||||
"""
|
||||
|
||||
def build_param(ast, name, default):
|
||||
"""build parameters:
|
||||
- handle defaults
|
||||
- handle format tuple parameters
|
||||
"""
|
||||
# if a formal parameter is a tuple, the parameter name
|
||||
# starts with a dot (eg. '.1', '.2')
|
||||
if name.startswith("."):
|
||||
# replace the name with the tuple-string
|
||||
name = self.get_tuple_parameter(ast, name)
|
||||
pass
|
||||
|
||||
if default:
|
||||
value = self.traverse(default, indent="")
|
||||
maybe_show_tree_param_default(self.showast, name, value)
|
||||
result = "%s=%s" % (name, value)
|
||||
if result[-2:] == "= ": # default was 'LOAD_CONST None'
|
||||
result += "None"
|
||||
return result
|
||||
else:
|
||||
return name
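Background sketch (hypothetical example; Python 2 only, a SyntaxError on Python 3): the tuple parameters that produce the dotted '.1'/'.2' names handled above look like this in source.

# CPython 2 compiles the tuple parameter as an anonymous argument named
# ".1" that is unpacked on entry; get_tuple_parameter() rebuilds the
# "(x1, y1)" spelling from that when it sees a name starting with a dot.
def distance((x1, y1), (x2, y2)):
    return ((x2 - x1) ** 2 + (y2 - y1) ** 2) ** 0.5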
|
||||
|
||||
# MAKE_FUNCTION_... or MAKE_CLOSURE_...
|
||||
assert node[-1].kind.startswith("MAKE_")
|
||||
|
||||
args_node = node[-1]
|
||||
if isinstance(args_node.attr, tuple):
|
||||
# positional args are after kwargs
|
||||
defparams = node[1 : args_node.attr[0] + 1]
|
||||
pos_args, kw_args, annotate_argc = args_node.attr
|
||||
else:
|
||||
defparams = node[: args_node.attr]
|
||||
kw_args = 0
|
||||
pass
|
||||
|
||||
lambda_index = None
|
||||
|
||||
if lambda_index and is_lambda and iscode(node[lambda_index].attr):
|
||||
assert node[lambda_index].kind == "LOAD_LAMBDA"
|
||||
code = node[lambda_index].attr
|
||||
else:
|
||||
code = code_node.attr
|
||||
|
||||
assert iscode(code)
|
||||
code = Code(code, self.scanner, self.currentclass)
|
||||
|
||||
# add defaults values to parameter names
|
||||
argc = code.co_argcount
|
||||
paramnames = list(code.co_varnames[:argc])
|
||||
|
||||
# defaults are for last n parameters, thus reverse
|
||||
paramnames.reverse()
|
||||
defparams.reverse()
|
||||
|
||||
try:
|
||||
ast = self.build_ast(
|
||||
code._tokens,
|
||||
code._customize,
|
||||
is_lambda=is_lambda,
|
||||
noneInNames=("None" in code.co_names),
|
||||
)
|
||||
except (ParserError, ParserError2) as p:
|
||||
self.write(str(p))
|
||||
if not self.tolerate_errors:
|
||||
self.ERROR = p
|
||||
return
|
||||
|
||||
kw_pairs = 0
|
||||
indent = self.indent
|
||||
|
||||
# build parameters
|
||||
params = [
|
||||
build_param(ast, name, default)
|
||||
for name, default in zip_longest(paramnames, defparams, fillvalue=None)
|
||||
]
|
||||
params.reverse() # back to correct order
|
||||
|
||||
if code_has_star_arg(code):
|
||||
params.append("*%s" % code.co_varnames[argc])
|
||||
argc += 1
|
||||
|
||||
# dump parameter list (with default values)
|
||||
if is_lambda:
|
||||
self.write("lambda ", ", ".join(params))
|
||||
# If the last statement is None (which is the
|
||||
# same thing as "return None" in a lambda) and the
|
||||
# next to last statement is a "yield". Then we want to
|
||||
# drop the (return) None since that was just put there
|
||||
# to have something to do after the yield finishes.
# FIXME: this is a bit hokey and not general
|
||||
if (
|
||||
len(ast) > 1
|
||||
and self.traverse(ast[-1]) == "None"
|
||||
and self.traverse(ast[-2]).strip().startswith("yield")
|
||||
):
|
||||
del ast[-1]
|
||||
# Now pick out the expr part of the last statement
|
||||
ast_expr = ast[-1]
|
||||
while ast_expr.kind != "expr":
|
||||
ast_expr = ast_expr[0]
|
||||
ast[-1] = ast_expr
|
||||
pass
|
||||
else:
|
||||
self.write("(", ", ".join(params))
|
||||
|
||||
if kw_args > 0:
|
||||
if not (4 & code.co_flags):
|
||||
if argc > 0:
|
||||
self.write(", *, ")
|
||||
else:
|
||||
self.write("*, ")
|
||||
pass
|
||||
else:
|
||||
self.write(", ")
|
||||
|
||||
for n in node:
|
||||
if n == "pos_arg":
|
||||
continue
|
||||
else:
|
||||
self.preorder(n)
|
||||
break
|
||||
pass
|
||||
|
||||
if code_has_star_star_arg(code):
|
||||
if argc > 0:
|
||||
self.write(", ")
|
||||
self.write("**%s" % code.co_varnames[argc + kw_pairs])
|
||||
|
||||
if is_lambda:
|
||||
self.write(": ")
|
||||
else:
|
||||
self.println("):")
|
||||
|
||||
if (
|
||||
len(code.co_consts) > 0 and code.co_consts[0] is not None and not is_lambda
|
||||
): # ugly
|
||||
# docstring exists, dump it
|
||||
print_docstring(self, indent, code.co_consts[0])
|
||||
|
||||
code._tokens = None # save memory
|
||||
if not is_lambda:
|
||||
assert ast == "stmts"
|
||||
|
||||
all_globals = find_all_globals(ast, set())
|
||||
|
||||
globals, nonlocals = find_globals_and_nonlocals(
|
||||
ast, set(), set(), code, self.version
|
||||
)
|
||||
|
||||
# Python 2 doesn't support the "nonlocal" statement
|
||||
assert self.version >= 3.0 or not nonlocals
|
||||
|
||||
for g in sorted((all_globals & self.mod_globs) | globals):
|
||||
self.println(self.indent, "global ", g)
|
||||
self.mod_globs -= all_globals
|
||||
has_none = "None" in code.co_names
|
||||
rn = has_none and not find_none(ast)
|
||||
self.gen_source(
|
||||
ast, code.co_name, code._customize, is_lambda=is_lambda, returnNone=rn
|
||||
)
|
||||
code._tokens = None
|
||||
code._customize = None # save memory
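To make the lambda/yield special case above concrete, an illustrative input of mine (not from the test suite): a lambda whose body is a yield expression is a generator function, and the compiler appends an implicit "return None" after the yield, which the decompiler drops so the lambda round-trips as written.

gen_fn = lambda: (yield "only value")
g = gen_fn()
print(next(g))   # -> 'only value'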
|
@@ -1,5 +1,4 @@
|
||||
# Copyright (c) 2015-2019 by Rocky Bernstein
|
||||
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
|
||||
# Copyright (c) 2015-2020 by Rocky Bernstein
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -14,7 +13,8 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""
|
||||
All the crazy things we have to do to handle Python functions
|
||||
All the crazy things we have to do to handle Python functions in 3.0-3.5 or so.
|
||||
The saga of changes before and after is in other files.
|
||||
"""
|
||||
from xdis.code import iscode, code_has_star_arg, code_has_star_star_arg
|
||||
from uncompyle6.scanner import Code
|
||||
@@ -303,179 +303,6 @@ def make_function3_annotate(
|
||||
code._tokens = code._customize = None # save memory
|
||||
|
||||
|
||||
def make_function2(self, node, is_lambda, nested=1, code_node=None):
|
||||
"""
|
||||
Dump function definition, doc string, and function body.
|
||||
This code is specialized for Python 2.
|
||||
"""
|
||||
|
||||
# FIXME: call make_function3 if we are self.version >= 3.0
|
||||
# and then simplify the below.
|
||||
|
||||
def build_param(ast, name, default):
|
||||
"""build parameters:
|
||||
- handle defaults
|
||||
- handle format tuple parameters
|
||||
"""
|
||||
# if a formal parameter is a tuple, the parameter name
|
||||
# starts with a dot (eg. '.1', '.2')
|
||||
if name.startswith("."):
|
||||
# replace the name with the tuple-string
|
||||
name = self.get_tuple_parameter(ast, name)
|
||||
pass
|
||||
|
||||
if default:
|
||||
value = self.traverse(default, indent="")
|
||||
maybe_show_tree_param_default(self.showast, name, value)
|
||||
result = "%s=%s" % (name, value)
|
||||
if result[-2:] == "= ": # default was 'LOAD_CONST None'
|
||||
result += "None"
|
||||
return result
|
||||
else:
|
||||
return name
|
||||
|
||||
# MAKE_FUNCTION_... or MAKE_CLOSURE_...
|
||||
assert node[-1].kind.startswith("MAKE_")
|
||||
|
||||
args_node = node[-1]
|
||||
if isinstance(args_node.attr, tuple):
|
||||
# positional args are after kwargs
|
||||
defparams = node[1 : args_node.attr[0] + 1]
|
||||
pos_args, kw_args, annotate_argc = args_node.attr
|
||||
else:
|
||||
defparams = node[: args_node.attr]
|
||||
kw_args = 0
|
||||
pass
|
||||
|
||||
lambda_index = None
|
||||
|
||||
if lambda_index and is_lambda and iscode(node[lambda_index].attr):
|
||||
assert node[lambda_index].kind == "LOAD_LAMBDA"
|
||||
code = node[lambda_index].attr
|
||||
else:
|
||||
code = code_node.attr
|
||||
|
||||
assert iscode(code)
|
||||
code = Code(code, self.scanner, self.currentclass)
|
||||
|
||||
# add defaults values to parameter names
|
||||
argc = code.co_argcount
|
||||
paramnames = list(code.co_varnames[:argc])
|
||||
|
||||
# defaults are for last n parameters, thus reverse
|
||||
paramnames.reverse()
|
||||
defparams.reverse()
|
||||
|
||||
try:
|
||||
ast = self.build_ast(
|
||||
code._tokens,
|
||||
code._customize,
|
||||
is_lambda=is_lambda,
|
||||
noneInNames=("None" in code.co_names),
|
||||
)
|
||||
except (ParserError, ParserError2) as p:
|
||||
self.write(str(p))
|
||||
if not self.tolerate_errors:
|
||||
self.ERROR = p
|
||||
return
|
||||
|
||||
kw_pairs = 0
|
||||
indent = self.indent
|
||||
|
||||
# build parameters
|
||||
params = [
|
||||
build_param(ast, name, default)
|
||||
for name, default in zip_longest(paramnames, defparams, fillvalue=None)
|
||||
]
|
||||
params.reverse() # back to correct order
|
||||
|
||||
if code_has_star_arg(code):
|
||||
params.append("*%s" % code.co_varnames[argc])
|
||||
argc += 1
|
||||
|
||||
# dump parameter list (with default values)
|
||||
if is_lambda:
|
||||
self.write("lambda ", ", ".join(params))
|
||||
# If the last statement is None (which is the
|
||||
# same thing as "return None" in a lambda) and the
|
||||
# next to last statement is a "yield". Then we want to
|
||||
# drop the (return) None since that was just put there
|
||||
# to have something to do after the yield finishes.
# FIXME: this is a bit hokey and not general
|
||||
if (
|
||||
len(ast) > 1
|
||||
and self.traverse(ast[-1]) == "None"
|
||||
and self.traverse(ast[-2]).strip().startswith("yield")
|
||||
):
|
||||
del ast[-1]
|
||||
# Now pick out the expr part of the last statement
|
||||
ast_expr = ast[-1]
|
||||
while ast_expr.kind != "expr":
|
||||
ast_expr = ast_expr[0]
|
||||
ast[-1] = ast_expr
|
||||
pass
|
||||
else:
|
||||
self.write("(", ", ".join(params))
|
||||
|
||||
if kw_args > 0:
|
||||
if not (4 & code.co_flags):
|
||||
if argc > 0:
|
||||
self.write(", *, ")
|
||||
else:
|
||||
self.write("*, ")
|
||||
pass
|
||||
else:
|
||||
self.write(", ")
|
||||
|
||||
for n in node:
|
||||
if n == "pos_arg":
|
||||
continue
|
||||
else:
|
||||
self.preorder(n)
|
||||
break
|
||||
pass
|
||||
|
||||
if code_has_star_star_arg(code):
|
||||
if argc > 0:
|
||||
self.write(", ")
|
||||
self.write("**%s" % code.co_varnames[argc + kw_pairs])
|
||||
|
||||
if is_lambda:
|
||||
self.write(": ")
|
||||
else:
|
||||
self.println("):")
|
||||
|
||||
if (
|
||||
len(code.co_consts) > 0 and code.co_consts[0] is not None and not is_lambda
|
||||
): # ugly
|
||||
# docstring exists, dump it
|
||||
print_docstring(self, indent, code.co_consts[0])
|
||||
|
||||
code._tokens = None # save memory
|
||||
if not is_lambda:
|
||||
assert ast == "stmts"
|
||||
|
||||
all_globals = find_all_globals(ast, set())
|
||||
|
||||
globals, nonlocals = find_globals_and_nonlocals(
|
||||
ast, set(), set(), code, self.version
|
||||
)
|
||||
|
||||
# Python 2 doesn't support the "nonlocal" statement
|
||||
assert self.version >= 3.0 or not nonlocals
|
||||
|
||||
for g in sorted((all_globals & self.mod_globs) | globals):
|
||||
self.println(self.indent, "global ", g)
|
||||
self.mod_globs -= all_globals
|
||||
has_none = "None" in code.co_names
|
||||
rn = has_none and not find_none(ast)
|
||||
self.gen_source(
|
||||
ast, code.co_name, code._customize, is_lambda=is_lambda, returnNone=rn
|
||||
)
|
||||
code._tokens = None
|
||||
code._customize = None # save memory
|
||||
|
||||
|
||||
def make_function3(self, node, is_lambda, nested=1, code_node=None):
|
||||
"""Dump function definition, doc string, and function body in
|
||||
Python version 3.0 and above
|
||||
@@ -514,10 +341,7 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
|
||||
- handle defaults
|
||||
- handle format tuple parameters
|
||||
"""
|
||||
if self.version >= 3.6:
|
||||
value = default
|
||||
else:
|
||||
value = self.traverse(default, indent="")
|
||||
value = self.traverse(default, indent="")
|
||||
maybe_show_tree_param_default(self.showast, name, value)
|
||||
if annotation:
|
||||
result = "%s: %s=%s" % (name, annotation, value)
|
||||
@@ -553,9 +377,7 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
|
||||
# not to be confused with keyword parameters which may appear after *.
|
||||
args_attr = args_node.attr
|
||||
|
||||
if isinstance(args_attr, tuple) or (
|
||||
self.version >= 3.6 and isinstance(args_attr, list)
|
||||
):
|
||||
if isinstance(args_attr, tuple):
|
||||
if len(args_attr) == 3:
|
||||
pos_args, kw_args, annotate_argc = args_attr
|
||||
else:
|
||||
@@ -601,9 +423,7 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
|
||||
lc_index = -3
|
||||
pass
|
||||
|
||||
if (
|
||||
self.version <= 3.5
|
||||
and len(node) > 2
|
||||
if (len(node) > 2
|
||||
and (have_kwargs or node[lc_index].kind != "load_closure")
|
||||
):
|
||||
|
||||
@@ -631,85 +451,11 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
|
||||
default_values_start : default_values_start + args_node.attr[0]
|
||||
]
|
||||
else:
|
||||
if self.version < 3.6:
|
||||
defparams = node[: args_node.attr[0]]
|
||||
kw_args = 0
|
||||
else:
|
||||
defparams = []
|
||||
# FIXME: DRY with code below
|
||||
default, kw_args, annotate_argc = args_node.attr[0:3]
|
||||
if default:
|
||||
expr_node = node[0]
|
||||
if node[0] == "pos_arg":
|
||||
expr_node = expr_node[0]
|
||||
assert (
|
||||
expr_node == "expr"
|
||||
), "expecting mkfunc default node to be an expr"
|
||||
if expr_node[0] == "LOAD_CONST" and isinstance(
|
||||
expr_node[0].attr, tuple
|
||||
):
|
||||
defparams = [repr(a) for a in expr_node[0].attr]
|
||||
elif expr_node[0] in frozenset(("list", "tuple", "dict", "set")):
|
||||
defparams = [
|
||||
self.traverse(n, indent="") for n in expr_node[0][:-1]
|
||||
]
|
||||
else:
|
||||
defparams = []
|
||||
pass
|
||||
else:
|
||||
if self.version < 3.6:
|
||||
defparams = node[: args_node.attr]
|
||||
defparams = node[: args_node.attr[0]]
|
||||
kw_args = 0
|
||||
else:
|
||||
default, kw_args, annotate, closure = args_node.attr
|
||||
if default:
|
||||
expr_node = node[0]
|
||||
if node[0] == "pos_arg":
|
||||
expr_node = expr_node[0]
|
||||
assert (
|
||||
expr_node == "expr"
|
||||
), "expecting mkfunc default node to be an expr"
|
||||
if expr_node[0] == "LOAD_CONST" and isinstance(
|
||||
expr_node[0].attr, tuple
|
||||
):
|
||||
defparams = [repr(a) for a in expr_node[0].attr]
|
||||
elif expr_node[0] in frozenset(("list", "tuple", "dict", "set")):
|
||||
defparams = [self.traverse(n, indent="") for n in expr_node[0][:-1]]
|
||||
else:
|
||||
defparams = []
|
||||
|
||||
i = -4
|
||||
kw_pairs = 0
|
||||
if closure:
|
||||
# FIXME: fill in
|
||||
annotate = node[i]
|
||||
i -= 1
|
||||
if annotate_argc:
|
||||
# Turn into subroutine and DRY with other use
|
||||
annotate_node = node[i]
|
||||
if annotate_node == "expr":
|
||||
annotate_node = annotate_node[0]
|
||||
annotate_name_node = annotate_node[-1]
|
||||
if annotate_node == "dict" and annotate_name_node.kind.startswith(
|
||||
"BUILD_CONST_KEY_MAP"
|
||||
):
|
||||
types = [
|
||||
self.traverse(n, indent="") for n in annotate_node[:-2]
|
||||
]
|
||||
names = annotate_node[-2].attr
|
||||
l = len(types)
|
||||
assert l == len(names)
|
||||
for i in range(l):
|
||||
annotate_dict[names[i]] = types[i]
|
||||
pass
|
||||
pass
|
||||
i -= 1
|
||||
if kw_args:
|
||||
kw_node = node[i]
|
||||
if kw_node == "expr":
|
||||
kw_node = kw_node[0]
|
||||
if kw_node == "dict":
|
||||
kw_pairs = kw_node[-1].attr
|
||||
else:
|
||||
defparams = node[: args_node.attr]
|
||||
kw_args = 0
|
||||
pass
|
||||
|
||||
if lambda_index and is_lambda and iscode(node[lambda_index].attr):
|
||||
@@ -727,7 +473,7 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
|
||||
|
||||
paramnames = list(scanner_code.co_varnames[:argc])
|
||||
if kwonlyargcount > 0:
|
||||
if self.version <= 3.5 and is_lambda:
|
||||
if is_lambda:
|
||||
kwargs = []
|
||||
for i in range(kwonlyargcount):
|
||||
paramnames.append(scanner_code.co_varnames[argc+i])
|
||||
@@ -753,11 +499,7 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
|
||||
self.ERROR = p
|
||||
return
|
||||
|
||||
if self.version >= 3.0:
|
||||
if self.version < 3.6:
|
||||
kw_pairs = args_node.attr[1]
|
||||
else:
|
||||
kw_pairs = 0
|
||||
kw_pairs = 0
|
||||
|
||||
i = len(paramnames) - len(defparams)
|
||||
|
||||
@@ -786,19 +528,20 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
|
||||
params.reverse() # back to correct order
|
||||
|
||||
if code_has_star_arg(code):
|
||||
if self.version > 3.0:
|
||||
star_arg = code.co_varnames[argc + kwonlyargcount]
|
||||
if annotate_dict and star_arg in annotate_dict:
|
||||
params.append("*%s: %s" % (star_arg, annotate_dict[star_arg]))
|
||||
else:
|
||||
params.append("*%s" % star_arg)
|
||||
pass
|
||||
if is_lambda and self.version <= 3.5:
|
||||
params.reverse()
|
||||
star_arg = code.co_varnames[argc + kwonlyargcount]
|
||||
if annotate_dict and star_arg in annotate_dict:
|
||||
params.append("*%s: %s" % (star_arg, annotate_dict[star_arg]))
|
||||
else:
|
||||
params.append("*%s" % code.co_varnames[argc])
|
||||
if not is_lambda or self.version >= 3.6:
|
||||
params.append("*%s" % star_arg)
|
||||
pass
|
||||
if is_lambda:
|
||||
params.reverse()
|
||||
if not is_lambda:
|
||||
argc += 1
|
||||
pass
|
||||
elif is_lambda and kwonlyargcount > 0:
|
||||
params.insert(0, "*")
|
||||
kwonlyargcount = 0
|
||||
|
||||
# dump parameter list (with default values)
|
||||
if is_lambda:
|
||||
@@ -843,106 +586,37 @@ def make_function3(self, node, is_lambda, nested=1, code_node=None):
|
||||
self.write(", ")
|
||||
ends_in_comma = True
|
||||
|
||||
if 3.0 <= self.version <= 3.5:
|
||||
kw_args = [None] * kwonlyargcount
|
||||
kw_nodes = node[0]
|
||||
if kw_nodes == "kwargs":
|
||||
for n in kw_nodes:
|
||||
name = eval(n[0].pattr)
|
||||
default = self.traverse(n[1], indent="")
|
||||
idx = kwargs.index(name)
|
||||
kw_args[idx] = "%s=%s" % (name, default)
|
||||
pass
|
||||
kw_args = [None] * kwonlyargcount
|
||||
kw_nodes = node[args_node.attr[0]]
|
||||
if kw_nodes == "kwargs":
|
||||
for n in kw_nodes:
|
||||
name = eval(n[0].pattr)
|
||||
default = self.traverse(n[1], indent="")
|
||||
idx = kwargs.index(name)
|
||||
kw_args[idx] = "%s=%s" % (name, default)
|
||||
pass
|
||||
pass
|
||||
|
||||
# FIXME: something weird is going on and the below
|
||||
# might not be right. On 3.4 kw_nodes != "kwarg"
|
||||
# because of some sort of type mismatch. I think
|
||||
# the test is for versions earlier than 3.3
|
||||
# on 3.5 if we have "kwarg" we still want to do this.
|
||||
# Perhaps we should be testing that kw_nodes is iterable?
|
||||
if kw_nodes != "kwarg" or self.version == 3.5:
|
||||
other_kw = [c == None for c in kw_args]
|
||||
|
||||
for i, flag in enumerate(other_kw):
|
||||
if flag:
|
||||
if i < len(kwargs):
|
||||
kw_args[i] = "%s" % kwargs[i]
|
||||
else:
|
||||
del kw_args[i]
|
||||
pass
|
||||
|
||||
self.write(", ".join(kw_args))
|
||||
ends_in_comma = False
|
||||
pass
|
||||
elif self.version >= 3.6:
|
||||
# argc = node[-1].attr
|
||||
# co = node[-3].attr
|
||||
# argcount = co.co_argcount
|
||||
# kwonlyargcount = co.co_kwonlyargcount
|
||||
|
||||
free_tup = ann_dict = kw_dict = default_tup = None
|
||||
fn_bits = node[-1].attr
|
||||
|
||||
# Skip over:
|
||||
# MAKE_FUNCTION,
|
||||
# optional docstring
|
||||
# LOAD_CONST qualified name,
|
||||
# LOAD_CONST code object
|
||||
index = -4 # Skip over:
|
||||
if node[-2] == "docstring":
|
||||
index = -5
|
||||
else:
|
||||
index = -4
|
||||
|
||||
if fn_bits[-1]:
|
||||
free_tup = node[index]
|
||||
index -= 1
|
||||
if fn_bits[-2]:
|
||||
ann_dict = node[index]
|
||||
index -= 1
|
||||
if fn_bits[-3]:
|
||||
kw_dict = node[index]
|
||||
index -= 1
|
||||
if fn_bits[-4]:
|
||||
default_tup = node[index]
|
||||
|
||||
if kw_dict == "expr":
|
||||
kw_dict = kw_dict[0]
|
||||
|
||||
# FIXME: handle free_tup, annotate_dict, and default_tup
|
||||
kw_args = [None] * kwonlyargcount
|
||||
|
||||
if kw_dict:
|
||||
assert kw_dict == "dict"
|
||||
defaults = [self.traverse(n, indent="") for n in kw_dict[:-2]]
|
||||
names = eval(self.traverse(kw_dict[-2]))
|
||||
assert len(defaults) == len(names)
|
||||
sep = ""
|
||||
# FIXME: possibly handle line breaks
|
||||
for i, n in enumerate(names):
|
||||
idx = kwargs.index(n)
|
||||
if annotate_dict and n in annotate_dict:
|
||||
t = "%s: %s=%s" % (n, annotate_dict[n], defaults[i])
|
||||
else:
|
||||
t = "%s=%s" % (n, defaults[i])
|
||||
kw_args[idx] = t
|
||||
pass
|
||||
pass
|
||||
|
||||
# handle others
|
||||
# FIXME: something weird is going on and the below
|
||||
# might not be right. On 3.4 kw_nodes != "kwarg"
|
||||
# because of some sort of type mismatch. I think
|
||||
# the test is for versions earlier than 3.3
|
||||
# on 3.5 if we have "kwarg" we still want to do this.
|
||||
# Perhaps we should be testing that kw_nodes is iterable?
|
||||
if kw_nodes != "kwarg" or self.version == 3.5:
|
||||
other_kw = [c == None for c in kw_args]
|
||||
|
||||
for i, flag in enumerate(other_kw):
|
||||
if flag:
|
||||
n = kwargs[i]
|
||||
if ann_dict and n in annotate_dict:
|
||||
kw_args[i] = "%s: %s" % (n, annotate_dict[n])
|
||||
if i < len(kwargs):
|
||||
kw_args[i] = "%s" % kwargs[i]
|
||||
else:
|
||||
kw_args[i] = "%s" % n
|
||||
del kw_args[i]
|
||||
pass
|
||||
|
||||
self.write(", ".join(kw_args))
|
||||
ends_in_comma = False
|
||||
pass
|
||||
|
||||
pass
|
||||
else:
|
398
uncompyle6/semantics/make_function36.py
Normal file
@@ -0,0 +1,398 @@
|
||||
# Copyright (c) 2019 by Rocky Bernstein
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""
|
||||
All the crazy things we have to do to handle Python functions in 3.6 and above.
|
||||
The saga of changes before 3.6 is in other files.
|
||||
"""
|
||||
from xdis.code import iscode, code_has_star_arg, code_has_star_star_arg
|
||||
from uncompyle6.scanner import Code
|
||||
from uncompyle6.parsers.treenode import SyntaxTree
|
||||
from uncompyle6.semantics.parser_error import ParserError
|
||||
from uncompyle6.parser import ParserError as ParserError2
|
||||
from uncompyle6 import PYTHON3
|
||||
from uncompyle6.semantics.helper import (
|
||||
print_docstring,
|
||||
find_all_globals,
|
||||
find_globals_and_nonlocals,
|
||||
find_none,
|
||||
)
|
||||
|
||||
if PYTHON3:
|
||||
from itertools import zip_longest
|
||||
else:
|
||||
from itertools import izip_longest as zip_longest
|
||||
|
||||
from uncompyle6.show import maybe_show_tree_param_default
|
||||
|
||||
|
||||
def make_function36(self, node, is_lambda, nested=1, code_node=None):
|
||||
"""Dump function definition, doc string, and function body in
|
||||
Python version 3.6 and above.
|
||||
"""
|
||||
# MAKE_CLOSURE adds an additional closure slot
|
||||
|
||||
# In Python 3.6 and above the stack layout changes again. I understand
|
||||
# 3.7 changes some of those changes, although I don't
|
||||
# see it in this code yet. Yes, it is hard to follow
|
||||
# and I am sure I haven't been able to keep up.
|
||||
|
||||
# Thank you, Python.
|
||||
|
||||
def build_param(ast, name, default, annotation=None):
|
||||
"""build parameters:
|
||||
- handle defaults
|
||||
- handle format tuple parameters
|
||||
"""
|
||||
value = default
|
||||
maybe_show_tree_param_default(self.showast, name, value)
|
||||
if annotation:
|
||||
result = "%s: %s=%s" % (name, annotation, value)
|
||||
else:
|
||||
result = "%s=%s" % (name, value)
|
||||
|
||||
# The below can probably be removed. This is probably
|
||||
# a holdover from days when LOAD_CONST erroneously
|
||||
# didn't handle LOAD_CONST None properly
|
||||
if result[-2:] == "= ": # default was 'LOAD_CONST None'
|
||||
result += "None"
|
||||
|
||||
return result
|
||||
|
||||
# MAKE_FUNCTION_... or MAKE_CLOSURE_...
|
||||
assert node[-1].kind.startswith("MAKE_")
|
||||
|
||||
# Python 3.3+ adds a qualified name at TOS (-1)
|
||||
# moving down the LOAD_LAMBDA instruction
|
||||
lambda_index = -3
|
||||
|
||||
args_node = node[-1]
|
||||
|
||||
annotate_dict = {}
|
||||
|
||||
# Get a list of tree nodes that constitute the values for the "default
|
||||
# parameters"; these are default values that appear before any *, and are
|
||||
# not to be confused with keyword parameters which may appear after *.
|
||||
args_attr = args_node.attr
|
||||
|
||||
if isinstance(args_attr, tuple) or isinstance(args_attr, list):
|
||||
if len(args_attr) == 3:
|
||||
pos_args, kw_args, annotate_argc = args_attr
|
||||
else:
|
||||
pos_args, kw_args, annotate_argc, closure = args_attr
|
||||
|
||||
i = -4
|
||||
kw_pairs = 0
|
||||
if closure:
|
||||
# FIXME: fill in
|
||||
i -= 1
|
||||
if annotate_argc:
|
||||
# Turn into subroutine and DRY with other use
|
||||
annotate_node = node[i]
|
||||
if annotate_node == "expr":
|
||||
annotate_node = annotate_node[0]
|
||||
annotate_name_node = annotate_node[-1]
|
||||
if annotate_node == "dict" and annotate_name_node.kind.startswith(
|
||||
"BUILD_CONST_KEY_MAP"
|
||||
):
|
||||
types = [
|
||||
self.traverse(n, indent="") for n in annotate_node[:-2]
|
||||
]
|
||||
names = annotate_node[-2].attr
|
||||
l = len(types)
|
||||
assert l == len(names)
|
||||
for i in range(l):
|
||||
annotate_dict[names[i]] = types[i]
|
||||
pass
|
||||
pass
|
||||
i -= 1
|
||||
if kw_args:
|
||||
kw_node = node[i]
|
||||
if kw_node == "expr":
|
||||
kw_node = kw_node[0]
|
||||
if kw_node == "dict":
|
||||
kw_pairs = kw_node[-1].attr
|
||||
|
||||
defparams = []
|
||||
# FIXME: DRY with code below
|
||||
default, kw_args, annotate_argc = args_node.attr[0:3]
|
||||
if default:
|
||||
expr_node = node[0]
|
||||
if node[0] == "pos_arg":
|
||||
expr_node = expr_node[0]
|
||||
assert expr_node == "expr", "expecting mkfunc default node to be an expr"
|
||||
if expr_node[0] == "LOAD_CONST" and isinstance(expr_node[0].attr, tuple):
|
||||
defparams = [repr(a) for a in expr_node[0].attr]
|
||||
elif expr_node[0] in frozenset(("list", "tuple", "dict", "set")):
|
||||
defparams = [self.traverse(n, indent="") for n in expr_node[0][:-1]]
|
||||
else:
|
||||
defparams = []
|
||||
pass
|
||||
else:
|
||||
default, kw_args, annotate, closure = args_node.attr
|
||||
if default:
|
||||
expr_node = node[0]
|
||||
if node[0] == "pos_arg":
|
||||
expr_node = expr_node[0]
|
||||
assert expr_node == "expr", "expecting mkfunc default node to be an expr"
|
||||
if expr_node[0] == "LOAD_CONST" and isinstance(expr_node[0].attr, tuple):
|
||||
defparams = [repr(a) for a in expr_node[0].attr]
|
||||
elif expr_node[0] in frozenset(("list", "tuple", "dict", "set")):
|
||||
defparams = [self.traverse(n, indent="") for n in expr_node[0][:-1]]
|
||||
else:
|
||||
defparams = []
|
||||
|
||||
i = -4
|
||||
kw_pairs = 0
|
||||
if closure:
|
||||
# FIXME: fill in
|
||||
# annotate = node[i]
|
||||
i -= 1
|
||||
if annotate_argc:
|
||||
# Turn into subroutine and DRY with other use
|
||||
annotate_node = node[i]
|
||||
if annotate_node == "expr":
|
||||
annotate_node = annotate_node[0]
|
||||
annotate_name_node = annotate_node[-1]
|
||||
if annotate_node == "dict" and annotate_name_node.kind.startswith(
|
||||
"BUILD_CONST_KEY_MAP"
|
||||
):
|
||||
types = [self.traverse(n, indent="") for n in annotate_node[:-2]]
|
||||
names = annotate_node[-2].attr
|
||||
l = len(types)
|
||||
assert l == len(names)
|
||||
for i in range(l):
|
||||
annotate_dict[names[i]] = types[i]
|
||||
pass
|
||||
pass
|
||||
i -= 1
|
||||
if kw_args:
|
||||
kw_node = node[i]
|
||||
if kw_node == "expr":
|
||||
kw_node = kw_node[0]
|
||||
if kw_node == "dict":
|
||||
kw_pairs = kw_node[-1].attr
|
||||
pass
|
||||
|
||||
if lambda_index and is_lambda and iscode(node[lambda_index].attr):
|
||||
assert node[lambda_index].kind == "LOAD_LAMBDA"
|
||||
code = node[lambda_index].attr
|
||||
else:
|
||||
code = code_node.attr
|
||||
|
||||
assert iscode(code)
|
||||
scanner_code = Code(code, self.scanner, self.currentclass)
|
||||
|
||||
# add defaults values to parameter names
|
||||
argc = code.co_argcount
|
||||
kwonlyargcount = code.co_kwonlyargcount
|
||||
|
||||
paramnames = list(scanner_code.co_varnames[:argc])
|
||||
kwargs = list(scanner_code.co_varnames[argc : argc + kwonlyargcount])
|
||||
|
||||
# defaults are for last n parameters when not in a lambda, thus reverse
|
||||
if not is_lambda:
|
||||
paramnames.reverse()
|
||||
defparams.reverse()
|
||||
|
||||
try:
|
||||
ast = self.build_ast(
|
||||
scanner_code._tokens,
|
||||
scanner_code._customize,
|
||||
is_lambda=is_lambda,
|
||||
noneInNames=("None" in code.co_names),
|
||||
)
|
||||
except (ParserError, ParserError2) as p:
|
||||
self.write(str(p))
|
||||
if not self.tolerate_errors:
|
||||
self.ERROR = p
|
||||
return
|
||||
|
||||
i = len(paramnames) - len(defparams)
|
||||
|
||||
# build parameters
|
||||
params = []
|
||||
if defparams:
|
||||
for i, defparam in enumerate(defparams):
|
||||
params.append(
|
||||
build_param(
|
||||
ast, paramnames[i], defparam, annotate_dict.get(paramnames[i])
|
||||
)
|
||||
)
|
||||
|
||||
for param in paramnames[i + 1 :]:
|
||||
if param in annotate_dict:
|
||||
params.append("%s: %s" % (param, annotate_dict[param]))
|
||||
else:
|
||||
params.append(param)
|
||||
else:
|
||||
for param in paramnames:
|
||||
if param in annotate_dict:
|
||||
params.append("%s: %s" % (param, annotate_dict[param]))
|
||||
else:
|
||||
params.append(param)
|
||||
|
||||
params.reverse() # back to correct order
|
||||
|
||||
if code_has_star_arg(code):
|
||||
star_arg = code.co_varnames[argc + kwonlyargcount]
|
||||
if star_arg in annotate_dict:
|
||||
params.append("*%s: %s" % (star_arg, annotate_dict[star_arg]))
|
||||
else:
|
||||
params.append("*%s" % star_arg)
|
||||
|
||||
argc += 1
|
||||
|
||||
# dump parameter list (with default values)
|
||||
if is_lambda:
|
||||
self.write("lambda ", ", ".join(params))
|
||||
# If the last statement is None (which is the
|
||||
# same thing as "return None" in a lambda) and the
|
||||
# next to last statement is a "yield". Then we want to
|
||||
# drop the (return) None since that was just put there
|
||||
# to have something to do after the yield finishes.
# FIXME: this is a bit hokey and not general
|
||||
if (
|
||||
len(ast) > 1
|
||||
and self.traverse(ast[-1]) == "None"
|
||||
and self.traverse(ast[-2]).strip().startswith("yield")
|
||||
):
|
||||
del ast[-1]
|
||||
# Now pick out the expr part of the last statement
|
||||
ast_expr = ast[-1]
|
||||
while ast_expr.kind != "expr":
|
||||
ast_expr = ast_expr[0]
|
||||
ast[-1] = ast_expr
|
||||
pass
|
||||
else:
|
||||
self.write("(", ", ".join(params))
|
||||
# self.println(indent, '#flags:\t', int(code.co_flags))
|
||||
|
||||
ends_in_comma = False
|
||||
if kwonlyargcount > 0:
|
||||
if not (4 & code.co_flags):
|
||||
if argc > 0:
|
||||
self.write(", *, ")
|
||||
else:
|
||||
self.write("*, ")
|
||||
pass
|
||||
ends_in_comma = True
|
||||
else:
|
||||
if argc > 0:
|
||||
self.write(", ")
|
||||
ends_in_comma = True
|
||||
|
||||
ann_dict = kw_dict = default_tup = None
|
||||
fn_bits = node[-1].attr
|
||||
# Skip over:
|
||||
# MAKE_FUNCTION,
|
||||
# optional docstring
|
||||
# LOAD_CONST qualified name,
|
||||
# LOAD_CONST code object
|
||||
index = -5 if node[-2] == "docstring" else -4
|
||||
if fn_bits[-1]:
|
||||
index -= 1
|
||||
if fn_bits[-2]:
|
||||
ann_dict = node[index]
|
||||
index -= 1
|
||||
if fn_bits[-3]:
|
||||
kw_dict = node[index]
|
||||
index -= 1
|
||||
if fn_bits[-4]:
|
||||
default_tup = node[index]
|
||||
|
||||
if kw_dict == "expr":
|
||||
kw_dict = kw_dict[0]
|
||||
|
||||
kw_args = [None] * kwonlyargcount
|
||||
|
||||
# FIXME: handle free_tup, ann_dict, and default_tup
|
||||
if kw_dict:
|
||||
assert kw_dict == "dict"
|
||||
defaults = [self.traverse(n, indent="") for n in kw_dict[:-2]]
|
||||
names = eval(self.traverse(kw_dict[-2]))
|
||||
assert len(defaults) == len(names)
|
||||
sep = ""
|
||||
# FIXME: possibly handle line breaks
|
||||
for i, n in enumerate(names):
|
||||
idx = kwargs.index(n)
|
||||
if annotate_dict and n in annotate_dict:
|
||||
t = "%s: %s=%s" % (n, annotate_dict[n], defaults[i])
|
||||
else:
|
||||
t = "%s=%s" % (n, defaults[i])
|
||||
kw_args[idx] = t
|
||||
pass
|
||||
pass
|
||||
# handle others
|
||||
other_kw = [c == None for c in kw_args]
|
||||
|
||||
for i, flag in enumerate(other_kw):
|
||||
if flag:
|
||||
n = kwargs[i]
|
||||
if n in annotate_dict:
|
||||
kw_args[i] = "%s: %s" % (n, annotate_dict[n])
|
||||
else:
|
||||
kw_args[i] = "%s" % n
|
||||
|
||||
self.write(", ".join(kw_args))
|
||||
ends_in_comma = False
|
||||
pass
|
||||
else:
|
||||
if argc == 0:
|
||||
ends_in_comma = True
|
||||
|
||||
if code_has_star_star_arg(code):
|
||||
if not ends_in_comma:
|
||||
self.write(", ")
|
||||
star_star_arg = code.co_varnames[argc + kwonlyargcount]
|
||||
if annotate_dict and star_star_arg in annotate_dict:
|
||||
self.write("**%s: %s" % (star_star_arg, annotate_dict[star_star_arg]))
|
||||
else:
|
||||
self.write("**%s" % star_star_arg)
|
||||
|
||||
if is_lambda:
|
||||
self.write(": ")
|
||||
else:
|
||||
self.write(")")
|
||||
if annotate_dict and "return" in annotate_dict:
|
||||
self.write(" -> %s" % annotate_dict["return"])
|
||||
self.println(":")
|
||||
|
||||
if node[-2] == "docstring" and not is_lambda:
|
||||
# docstring exists, dump it
|
||||
self.println(self.traverse(node[-2]))
|
||||
|
||||
scanner_code._tokens = None # save memory
|
||||
assert ast == "stmts"
|
||||
|
||||
all_globals = find_all_globals(ast, set())
|
||||
globals, nonlocals = find_globals_and_nonlocals(
|
||||
ast, set(), set(), code, self.version
|
||||
)
|
||||
|
||||
for g in sorted((all_globals & self.mod_globs) | globals):
|
||||
self.println(self.indent, "global ", g)
|
||||
|
||||
for nl in sorted(nonlocals):
|
||||
self.println(self.indent, "nonlocal ", nl)
|
||||
|
||||
self.mod_globs -= all_globals
|
||||
has_none = "None" in code.co_names
|
||||
rn = has_none and not find_none(ast)
|
||||
self.gen_source(
|
||||
ast, code.co_name, scanner_code._customize, is_lambda=is_lambda, returnNone=rn
|
||||
)
|
||||
|
||||
scanner_code._tokens = None
|
||||
scanner_code._customize = None # save memory
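Background on the fn_bits handling above — as I understand CPython 3.6/3.7 (this is not asserted by the commit itself), the MAKE_FUNCTION oparg is a bit mask: 0x01 a tuple of positional defaults, 0x02 a dict of keyword-only defaults, 0x04 an annotations dict (typically built with BUILD_CONST_KEY_MAP), 0x08 a closure tuple. A small probe, not part of this commit:

# Disassemble a function with defaults, kw-only defaults and annotations
# to see which stack items MAKE_FUNCTION consumes for each flag bit.
import dis

source = "def f(a, b=1, *, c=2, d: int = 3) -> bool:\n    return True\n"
dis.dis(compile(source, "<example>", "exec"))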
|
@@ -1,4 +1,4 @@
|
||||
# Copyright (c) 2015-2019 by Rocky Bernstein
|
||||
# Copyright (c) 2015-2020 by Rocky Bernstein
|
||||
# Copyright (c) 2005 by Dan Pascu <dan@windowmaker.org>
|
||||
# Copyright (c) 2000-2002 by hartmut Goebel <h.goebel@crazy-compilers.com>
|
||||
# Copyright (c) 1999 John Aycock
|
||||
@@ -137,12 +137,15 @@ from uncompyle6.parsers.treenode import SyntaxTree
|
||||
from spark_parser import GenericASTTraversal, DEFAULT_DEBUG as PARSER_DEFAULT_DEBUG
|
||||
from uncompyle6.scanner import Code, get_scanner
|
||||
import uncompyle6.parser as python_parser
|
||||
from uncompyle6.semantics.make_function import make_function2, make_function3
|
||||
from uncompyle6.semantics.make_function2 import make_function2
|
||||
from uncompyle6.semantics.make_function3 import make_function3
|
||||
from uncompyle6.semantics.make_function36 import make_function36
|
||||
from uncompyle6.semantics.parser_error import ParserError
|
||||
from uncompyle6.semantics.check_ast import checker
|
||||
from uncompyle6.semantics.customize import customize_for_version
|
||||
from uncompyle6.semantics.helper import (
|
||||
print_docstring,
|
||||
find_code_node,
|
||||
find_globals_and_nonlocals,
|
||||
flatten_list,
|
||||
)
|
||||
@@ -820,6 +823,7 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
else:
|
||||
self.write(iname, " as ", sname)
|
||||
self.prune() # stop recursing
|
||||
n_alias37 = n_alias
|
||||
|
||||
def n_import_from(self, node):
|
||||
relative_path_index = 0
|
||||
@@ -840,23 +844,9 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
|
||||
def n_mkfunc(self, node):
|
||||
|
||||
if self.version >= 3.3 or node[-2] in ("kwargs", "no_kwargs"):
|
||||
# LOAD_CODET code object ..
|
||||
# LOAD_CONST "x0" if >= 3.3
|
||||
# MAKE_FUNCTION ..
|
||||
code_node = node[-3]
|
||||
elif node[-2] == "expr":
|
||||
code_node = node[-2][0]
|
||||
else:
|
||||
# LOAD_CODE code object ..
|
||||
# MAKE_FUNCTION ..
|
||||
code_node = node[-2]
|
||||
|
||||
assert iscode(code_node.attr)
|
||||
|
||||
func_name = code_node.attr.co_name
|
||||
self.write(func_name)
|
||||
|
||||
code_node = find_code_node(node, -2)
|
||||
code = code_node.attr
|
||||
self.write(code.co_name)
|
||||
self.indent_more()
|
||||
|
||||
self.make_function(node, is_lambda=False, code_node=code_node)
|
||||
@@ -868,11 +858,15 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
self.indent_less()
|
||||
self.prune() # stop recursing
|
||||
|
||||
# Python changes make_function so much that we need at least 3 different routines,
|
||||
# and probably more in the future.
|
||||
def make_function(self, node, is_lambda, nested=1, code_node=None, annotate=None):
|
||||
if self.version >= 3.0:
|
||||
make_function3(self, node, is_lambda, nested, code_node)
|
||||
else:
|
||||
if self.version <= 2.7:
|
||||
make_function2(self, node, is_lambda, nested, code_node)
|
||||
elif 3.0 <= self.version <= 3.5:
|
||||
make_function3(self, node, is_lambda, nested, code_node)
|
||||
elif self.version >= 3.6:
|
||||
make_function36(self, node, is_lambda, nested, code_node)
|
||||
|
||||
def n_docstring(self, node):
|
||||
|
||||
@@ -1243,6 +1237,7 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
|
||||
# Iterate to find the innermost store
|
||||
# We'll come back to the list iteration below.
|
||||
|
||||
while n in ("list_iter", "comp_iter"):
|
||||
# iterate one nesting deeper
|
||||
if self.version == 3.0 and len(n) == 3:
|
||||
@@ -1255,13 +1250,18 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
if n[2] == "store" and not store:
|
||||
store = n[2]
|
||||
n = n[3]
|
||||
elif n in ("list_if", "list_if_not", "comp_if", "comp_if_not"):
|
||||
have_not = n in ("list_if_not", "comp_if_not")
|
||||
if_node = n[0]
|
||||
if n[1] == "store":
|
||||
store = n[1]
|
||||
n = n[2]
|
||||
pass
|
||||
elif n in ("list_if", "list_if_not",
|
||||
"list_if37", "list_if37_not",
|
||||
"comp_if", "comp_if_not"):
|
||||
have_not = n in ("list_if_not", "comp_if_not", "list_if37_not")
|
||||
if n in ("list_if37", "list_if37_not"):
|
||||
n = n[1]
|
||||
else:
|
||||
if_node = n[0]
|
||||
if n[1] == "store":
|
||||
store = n[1]
|
||||
n = n[2]
|
||||
pass
|
||||
pass
|
||||
|
||||
# Python 2.7+ starts including set_comp_body
|
||||
@@ -1270,6 +1270,7 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
if self.version != 3.0:
|
||||
assert n.kind in (
|
||||
"lc_body",
|
||||
"list_if37",
|
||||
"comp_body",
|
||||
"set_comp_func",
|
||||
"set_comp_body",
|
||||
@@ -1376,7 +1377,11 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
list_if = n
|
||||
else:
|
||||
list_if = n[1]
|
||||
n = n[2]
|
||||
n = n[-1]
|
||||
pass
|
||||
elif n == "list_if37":
|
||||
list_ifs.append(n)
|
||||
n = n[-1]
|
||||
pass
|
||||
pass
|
||||
|
||||
@@ -1888,6 +1893,11 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
self.prune()
|
||||
return
|
||||
|
||||
if node[0] == "UNPACK_SEQUENCE_0":
|
||||
self.write("[]")
|
||||
self.prune()
|
||||
return
|
||||
|
||||
for n in node[1:]:
|
||||
if n[0].kind == "unpack":
|
||||
n[0].kind = "unpack_w_parens"
|
||||
@@ -2073,6 +2083,7 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
try:
|
||||
self.write(eval(expr, d, d))
|
||||
except:
|
||||
from trepan.api import debug; debug()
|
||||
raise
|
||||
m = escape.search(fmt, i)
|
||||
self.write(fmt[i:])
|
||||
@@ -2210,7 +2221,11 @@ class SourceWalker(GenericASTTraversal, object):
|
||||
def build_class(self, code):
|
||||
"""Dump class definition, doc string and class body."""
|
||||
|
||||
assert iscode(code)
|
||||
try:
|
||||
assert iscode(code)
|
||||
except:
|
||||
from trepan.api import debug; debug()
|
||||
|
||||
self.classes.append(self.currentclass)
|
||||
code = Code(code, self.scanner, self.currentclass)
|
||||
|
||||
|
@@ -1,4 +1,4 @@
|
||||
# Copyright (c) 2019 by Rocky Bernstein
|
||||
# Copyright (c) 2019-2020 by Rocky Bernstein
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
@@ -13,10 +13,12 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from xdis.code import iscode
|
||||
from uncompyle6.show import maybe_show_tree
|
||||
from copy import copy
|
||||
from spark_parser import GenericASTTraversal, GenericASTTraversalPruningException
|
||||
|
||||
from uncompyle6.semantics.helper import find_code_node
|
||||
from uncompyle6.parsers.treenode import SyntaxTree
|
||||
from uncompyle6.scanners.tok import Token
|
||||
from uncompyle6.semantics.consts import RETURN_NONE
|
||||
@@ -30,8 +32,7 @@ def is_docstring(node):
|
||||
|
||||
|
||||
class TreeTransform(GenericASTTraversal, object):
|
||||
def __init__(self, version, show_ast=None,
|
||||
is_pypy=False):
|
||||
def __init__(self, version, show_ast=None, is_pypy=False):
|
||||
self.version = version
|
||||
self.showast = show_ast
|
||||
self.is_pypy = is_pypy
|
||||
@@ -67,6 +68,29 @@ class TreeTransform(GenericASTTraversal, object):
|
||||
node[i] = self.preorder(kid)
|
||||
return node
|
||||
|
||||
def n_mkfunc(self, node):
|
||||
"""If the function has a docstring (this is found in the code
|
||||
constants), pull that out and make it part of the syntax
|
||||
tree. When generating the source string, that AST node rather
than the code field is seen and used.
|
||||
"""
|
||||
|
||||
code = find_code_node(node, -2).attr
|
||||
|
||||
if (
|
||||
node[-1].pattr != "closure"
|
||||
and len(code.co_consts) > 0
|
||||
and code.co_consts[0] is not None
|
||||
):
|
||||
docstring_node = SyntaxTree(
|
||||
"docstring", [Token("LOAD_STR", has_arg=True, pattr=code.co_consts[0])]
|
||||
)
|
||||
docstring_node.transformed_by = "n_mkfunc"
|
||||
node = SyntaxTree("mkfunc", node[:-1] + [docstring_node, node[-1]])
|
||||
node.transformed_by = "n_mkfunc"
|
||||
|
||||
return node
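A quick illustration of the standard CPython behavior this transform relies on (my example, not code from this commit): the docstring sits in co_consts[0] of the function's code object, which is what n_mkfunc() lifts into a synthetic "docstring" tree node.

def greet(name):
    """Say hello."""
    return "Hello, " + name

print(greet.__code__.co_consts[0])   # -> 'Say hello.'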
|
||||
|
||||
def n_ifstmt(self, node):
|
||||
"""Here we check if we can turn an `ifstmt` or 'iflaststmtl` into
|
||||
some kind of `assert` statement"""
|
||||
@@ -128,7 +152,13 @@ class TreeTransform(GenericASTTraversal, object):
|
||||
expr = call[1][0]
|
||||
node = SyntaxTree(
|
||||
kind,
|
||||
[assert_expr, jump_cond, LOAD_ASSERT, expr, RAISE_VARARGS_1]
|
||||
[
|
||||
assert_expr,
|
||||
jump_cond,
|
||||
LOAD_ASSERT,
|
||||
expr,
|
||||
RAISE_VARARGS_1,
|
||||
],
|
||||
)
|
||||
pass
|
||||
pass
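For orientation, an illustrative source-level before/after for this assert transform (example of mine; CPython compiles `assert` to the jump/LOAD_ASSERT/RAISE pattern matched above):

x = 3

# What the matched `ifstmt` pattern corresponds to in source form ...
if not isinstance(x, int):
    raise AssertionError("x must be an int")

# ... and the equivalent `assert` statement the transform emits instead.
assert isinstance(x, int), "x must be an int"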
|
||||
@@ -157,10 +187,9 @@ class TreeTransform(GenericASTTraversal, object):
|
||||
|
||||
LOAD_ASSERT = expr[0]
|
||||
node = SyntaxTree(
|
||||
kind,
|
||||
[assert_expr, jump_cond, LOAD_ASSERT, RAISE_VARARGS_1]
|
||||
kind, [assert_expr, jump_cond, LOAD_ASSERT, RAISE_VARARGS_1]
|
||||
)
|
||||
node.transformed_by="n_ifstmt",
|
||||
node.transformed_by = ("n_ifstmt",)
|
||||
pass
|
||||
pass
|
||||
return node
|
||||
@@ -171,7 +200,9 @@ class TreeTransform(GenericASTTraversal, object):
|
||||
# if elif elif
|
||||
def n_ifelsestmt(self, node, preprocess=False):
|
||||
"""
|
||||
Here we turn:
|
||||
Transformation involving if..else statements.
|
||||
For example
|
||||
|
||||
|
||||
if ...
|
||||
else
|
||||
@@ -184,7 +215,7 @@ class TreeTransform(GenericASTTraversal, object):
|
||||
|
||||
[else ...]
|
||||
|
||||
where appropriate
|
||||
where appropriate.
|
||||
"""
|
||||
else_suite = node[3]
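Roughly, the tree rewrite here corresponds to this source-level change (illustrative example, not from the commit):

a = 2

# Decompiled naively, a chain of tests nests inside `else:` blocks ...
if a == 1:
    result = "one"
else:
    if a == 2:
        result = "two"
    else:
        result = "many"

# ... which this transform folds back into the flat elif chain the
# original source almost certainly used.
if a == 1:
    result = "one"
elif a == 2:
    result = "two"
else:
    result = "many"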
|
||||
|
||||
@@ -265,6 +296,28 @@ class TreeTransform(GenericASTTraversal, object):
|
||||
list_for_node.transformed_by = ("n_list_for",)
|
||||
return list_for_node
|
||||
|
||||
def n_stmts(self, node):
|
||||
if node.first_child() == "SETUP_ANNOTATIONS":
|
||||
prev = node[0][0][0]
|
||||
new_stmts = [node[0]]
|
||||
for i, sstmt in enumerate(node[1:]):
|
||||
ann_assign = sstmt[0][0]
|
||||
if (sstmt[0] == "stmt" and ann_assign == "ann_assign" and prev == "assign"):
|
||||
annotate_var = ann_assign[-2]
|
||||
if annotate_var.attr == prev[-1][0].attr:
|
||||
del new_stmts[-1]
|
||||
sstmt[0][0] = SyntaxTree(
|
||||
"ann_assign_init",
|
||||
[ann_assign[0], prev[0], annotate_var])
|
||||
sstmt[0][0].transformed_by="n_stmts"
|
||||
pass
|
||||
pass
|
||||
new_stmts.append(sstmt)
|
||||
prev = ann_assign
|
||||
pass
|
||||
node.data = new_stmts
|
||||
return node
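For context, the Python 3.6+ pattern this handles — a variable annotation with an initializer — looks like this (illustrative; `ann_assign_init` is the merged node the transform produces from the separate assign and annotation entries that follow SETUP_ANNOTATIONS):

# Python 3.6+ only: an annotated assignment at module level.
count: int = 0

# The annotation is recorded separately from the value at run time:
print(__annotations__["count"])   # -> <class 'int'>
print(count)                      # -> 0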
|
||||
|
||||
def traverse(self, node, is_lambda=False):
|
||||
node = self.preorder(node)
|
||||
return node
|
||||
@@ -274,9 +327,30 @@ class TreeTransform(GenericASTTraversal, object):
|
||||
self.ast = copy(ast)
|
||||
self.ast = self.traverse(self.ast, is_lambda=False)
|
||||
|
||||
if self.ast[-1] == RETURN_NONE:
|
||||
self.ast.pop() # remove last node
|
||||
# todo: if empty, add 'pass'
|
||||
try:
|
||||
for i in range(len(self.ast)):
|
||||
if is_docstring(self.ast[i]):
|
||||
docstring_ast = SyntaxTree(
|
||||
"docstring",
|
||||
[
|
||||
Token(
|
||||
"LOAD_STR",
|
||||
has_arg=True,
|
||||
offset=0,
|
||||
pattr=self.ast[i][0][0][0][0].attr,
|
||||
)
|
||||
],
|
||||
transformed_by="transform",
|
||||
)
|
||||
del self.ast[i]
|
||||
self.ast.insert(0, docstring_ast)
|
||||
break
|
||||
|
||||
if self.ast[-1] == RETURN_NONE:
|
||||
self.ast.pop() # remove last node
|
||||
# todo: if empty, add 'pass'
|
||||
except:
|
||||
pass
|
||||
|
||||
return self.ast
|
||||
|
||||
|
@@ -12,4 +12,4 @@
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
# This file is suitable for sourcing inside bash as
|
||||
# well as importing into Python
|
||||
VERSION="3.6.1" # noqa
|
||||
VERSION="3.6.2" # noqa
|
||||
|